forked from p85947160/gitea
Compare commits
200 Commits
main ... release/v1
Author | SHA1 | Date |
---|---|---|
Lauris BH | 34fe3d390b | |
silverwind | ce51c2bdf6 | |
6543 | 7937f1463a | |
6543 | dbe9c11238 | |
silverwind | 313ace93d0 | |
zeripath | f79a2e193f | |
6543 | 5d4251eb78 | |
pvgoran | 88008b681d | |
guillep2k | 5462fdcbbd | |
guillep2k | 161e550200 | |
Cirno the Strongest | 95af6096fb | |
6543 | 801f4b9e7a | |
silverwind | c0c3a533a0 | |
6543 | ed646078e1 | |
zeripath | dc0ea133e1 | |
Gary Kim | a854846f06 | |
silverwind | b52e8de7de | |
zeripath | 1b62916393 | |
pvgoran | 1d57c309ef | |
6543 | cf97e65b66 | |
zeripath | 42a46cff35 | |
zeripath | 2cb3db2d20 | |
zeripath | 04e480d477 | |
zeripath | de9a96c4de | |
guillep2k | 878434146f | |
6543 | d78be7ddf9 | |
6543 | 83f8414e1e | |
guillep2k | 0b216f40fd | |
zeripath | dd6e604f8f | |
zeripath | 86863ae939 | |
zeripath | f3a90057a5 | |
6543 | 03fdd82d63 | |
6543 | cd7fa15d1d | |
6543 | 79868d7096 | |
zeripath | 19626b93f8 | |
zeripath | 91e6a7f7ea | |
guillep2k | ff7eaa1eb4 | |
Kyle Evans | 5131206aad | |
6543 | bfc25fcf40 | |
zeripath | 4a6765fba2 | |
zeripath | dca8ef9407 | |
zeripath | cebef5c871 | |
6543 | 245d6ebda5 | |
zeripath | d9875ff2e1 | |
6543 | cc2a6c1d30 | |
赵智超 | b5fd55de73 | |
6543 | e11b3a1076 | |
6543 | 0c4be64345 | |
zeripath | c34ad62eea | |
6543 | f7d7cf4e2d | |
zeripath | 99a364a9dc | |
6543 | 3afbbfe921 | |
zeripath | bfce841b04 | |
Lunny Xiao | 139fc7cfee | |
zeripath | 596eebb2b6 | |
zeripath | 1d5d745851 | |
zeripath | 3dabfd4933 | |
zeripath | 6ee6731290 | |
Lunny Xiao | 602fe45936 | |
Lauris BH | e2da9cd21f | |
Lauris BH | c0b917b7eb | |
John Olheiser | 54ea58ddf0 | |
Lunny Xiao | 0158725387 | |
Lunny Xiao | f3cacf1332 | |
techknowlogick | a15dc93011 | |
zeripath | 66b31786d3 | |
Lunny Xiao | 931ddfec6d | |
Lunny Xiao | 7e0a5b17db | |
zeripath | 07688231c2 | |
John Olheiser | 21eaeb8418 | |
Lauris BH | 9a929ad17f | |
zeripath | c19ac41b34 | |
Lunny Xiao | fd85d31cb4 | |
guillep2k | c9e4d7a564 | |
techknowlogick | 9990430e32 | |
John Olheiser | 6f5656ab0e | |
guillep2k | e4a876cee1 | |
guillep2k | abb534ba7a | |
James Lakin | 65dceb6a40 | |
6543 | db26f0aca9 | |
Andreas Shimokawa | 76878fd69b | |
zeripath | 3444fa2dd7 | |
zeripath | caa2aeaa52 | |
6543 | 11300ee582 | |
John Olheiser | c6b78c3d31 | |
zeripath | 4c40aa5be9 | |
6543 | 50f2e90b76 | |
zeripath | 5d11ccc9e1 | |
guillep2k | 93860af542 | |
James Lakin | 7bf5834f2c | |
John Olheiser | 1fbdd9335f | |
John Olheiser | e9061a537c | |
John Olheiser | ed664a9e1d | |
John Olheiser | 4cb18601ff | |
oscar.lofwenhamn | 3abb25166c | |
6543 | 9e6ad64d48 | |
Lunny Xiao | b51d7c459e | |
zeripath | d3b6f001fe | |
silverwind | e938f1d945 | |
guillep2k | 7284327a00 | |
guillep2k | 919f3f11e2 | |
guillep2k | 3cee15e6f9 | |
Lunny Xiao | 34e3644ada | |
guillep2k | 14bd120cdc | |
6543 | 3e40f8bebc | |
zeripath | df5f1d9dca | |
John Olheiser | 457ee1ab5a | |
zeripath | 4f64688902 | |
zeripath | 117dcf1c02 | |
mrsdizzie | 4529a262c0 | |
zeripath | ff24f81a05 | |
Antoine GIRARD | e3cb4f9d0e | |
zeripath | 9b7890f1cc | |
Antoine GIRARD | eb064dfda2 | |
zeripath | f9e66e5a46 | |
zeripath | ef89260cf1 | |
zeripath | 315d928626 | |
guillep2k | 5525452bdf | |
Lunny Xiao | 987cd277f6 | |
zeripath | 5cdfde2ebf | |
jaqra | 1cd6233cef | |
James Lakin | cb81e39f7a | |
Lauris BH | 8efd6b32e2 | |
6543 | c95d9603ea | |
John Olheiser | 9169b39458 | |
Lauris BH | 80eb50655a | |
6543 | b16c555541 | |
guillep2k | b5b44364e3 | |
guillep2k | 6af58022c8 | |
guillep2k | e48b460a0a | |
John Olheiser | 2cd2614eaa | |
6543 | 0129e76ef5 | |
6543 | 6896dad675 | |
zeripath | 1ed4323005 | |
Lunny Xiao | 049af0d3d0 | |
6543 | f5727d83dd | |
Lauris BH | 912ce27421 | |
6543 | b3549bb5ec | |
Lunny Xiao | 491cbeca67 | |
zeripath | 895d92ffe5 | |
zeripath | 4b11f967bd | |
Lunny Xiao | 1e73dd2446 | |
6543 | 315026c2c5 | |
Lunny Xiao | 16dfd9ffbe | |
Lunny Xiao | 16f7b43903 | |
techknowlogick | 043febdbc9 | |
guillep2k | 60f91d56f0 | |
guillep2k | 16fc15ae6a | |
techknowlogick | ef8f6d99f1 | |
John Olheiser | 4b688135f9 | |
John Olheiser | e24861a546 | |
6543 | 128cc34344 | |
Lauris BH | f82a805478 | |
Lunny Xiao | 0dced15c1a | |
John Olheiser | db9342c854 | |
zeripath | 79c1d48532 | |
zeripath | 05b9864086 | |
zeripath | ff508c9c9b | |
Lunny Xiao | f96c1a2c79 | |
6543 | ce756ee89f | |
zeripath | f2e9d4b851 | |
zeripath | e878d743f4 | |
6543 | 3fa14d89a2 | |
zeripath | bcb722daec | |
Lunny Xiao | 8add1dfacc | |
David Svantesson | aa6ed1b7c1 | |
6543 | 95cb921097 | |
6543 | 6730df9e8c | |
Moritz | b577500a54 | |
Lunny Xiao | fe46185407 | |
Lunny Xiao | 69a2a29c33 | |
Lunny Xiao | f766719895 | |
Lunny Xiao | e2ddc42377 | |
John Olheiser | 3521177a34 | |
Lunny Xiao | c8bb0ecf52 | |
zeripath | dbe6136348 | |
6543 | 6d1f7e90cf | |
David Svantesson | 42663a687c | |
6543 | 73c90c26d4 | |
John Olheiser | c579ad92b5 | |
6543 | 602c5da953 | |
6543 | 1980e59ac2 | |
Lunny Xiao | 28508792ba | |
silverwind | 3e23dad075 | |
zeripath | b13b9d3dbd | |
zeripath | 4072f28e60 | |
6543 | dbeef6bb02 | |
silverwind | fec35440db | |
zeripath | f8ea50cc7a | |
zeripath | 0e53a16cca | |
zeripath | 7eaba6ba8a | |
guillep2k | ff16099c6d | |
John Olheiser | a516a7ba0f | |
silverwind | 11bce6fd3d | |
techknowlogick | 3fb906dc02 | |
zeripath | 3a00a690c9 | |
John Olheiser | a2b7cc1bb1 | |
John Olheiser | 04a77b1f42 | |
John Olheiser | f523372d07 | |
6543 | e39c238ef4 |
@@ -1,44 +1,57 @@
# The full repository name
repo: go-gitea/gitea

# Service type (gitea or github)
service: github

# Base URL for Gitea instance if using gitea service type (optional)
# Default: https://gitea.com
base-url:

# Changelog groups and which labeled PRs to add to each group
groups:
  -
  -
    name: BREAKING
    labels:
      - kind/breaking
  -
  -
    name: FEATURE
    labels:
      - kind/feature
  -
    name: SECURITY
    labels:
      - kind/security
  -
    name: BUGFIXES
    labels:
      - kind/bug
  -
  -
    name: ENHANCEMENT
    labels:
      - kind/enhancement
      - kind/refactor
      - kind/ui
  -
    name: SECURITY
    labels:
      - kind/security
  -
    name: TESTING
    labels:
      - kind/testing
  -
  -
    name: TRANSLATION
    labels:
      - kind/translation
  -
  -
    name: BUILD
    labels:
      - kind/build
      - kind/lint
  -
  -
    name: DOCS
    labels:
      - kind/docs
  -
  -
    name: MISC
    default: true
    default: true

# regex indicating which labels to skip for the changelog
skip-labels: skip-changelog|backport\/.+
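For context, this configuration drives the changelog generator's grouping: a merged PR is filed under the group whose `labels` list contains one of its `kind/*` labels, while anything matching the `skip-labels` regex (for example a `backport/v1.11` label) is left out. Illustratively, the generated output takes the shape of the CHANGELOG.md entries further down (the title and number below are placeholders, not from this repository):

    * BUGFIXES
      * Fix an example regression (#00000)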
413 .drone.yml
@ -1,6 +1,61 @@
|
|||
---
|
||||
kind: pipeline
|
||||
name: testing
|
||||
name: compliance
|
||||
|
||||
platform:
|
||||
os: linux
|
||||
arch: arm64
|
||||
|
||||
workspace:
|
||||
base: /go
|
||||
path: src/code.gitea.io/gitea
|
||||
|
||||
steps:
|
||||
- name: pre-build
|
||||
pull: always
|
||||
image: node:10 # this step is kept at the lowest version of node that we support
|
||||
commands:
|
||||
- make css
|
||||
- make js
|
||||
|
||||
- name: build-without-gcc
|
||||
pull: always
|
||||
image: golang:1.11 # this step is kept as the lowest version of golang that we support
|
||||
environment:
|
||||
GO111MODULE: on
|
||||
GOPROXY: off
|
||||
commands:
|
||||
- go build -mod=vendor -o gitea_no_gcc # test if build succeeds without the sqlite tag
|
||||
|
||||
- name: build-linux-386
|
||||
pull: always
|
||||
image: golang:1.13
|
||||
environment:
|
||||
GO111MODULE: on
|
||||
GOPROXY: off
|
||||
GOOS: linux
|
||||
GOARCH: 386
|
||||
commands:
|
||||
- go build -mod=vendor -o gitea_linux_386 # test if compatible with 32 bit
|
||||
|
||||
- name: check
|
||||
pull: always
|
||||
image: golang:1.13
|
||||
commands:
|
||||
- make clean
|
||||
- make golangci-lint
|
||||
- make revive
|
||||
- make swagger-check
|
||||
- make swagger-validate
|
||||
- make test-vendor
|
||||
environment:
|
||||
GOPROXY: https://goproxy.cn # proxy.golang.org is blocked in China, this proxy is not
|
||||
GOSUMDB: sum.golang.org
|
||||
TAGS: bindata sqlite sqlite_unlock_notify
|
||||
|
||||
---
|
||||
kind: pipeline
|
||||
name: testing-amd64
|
||||
|
||||
platform:
|
||||
os: linux
|
||||
|
@ -25,15 +80,9 @@ services:
|
|||
MYSQL_ALLOW_EMPTY_PASSWORD: yes
|
||||
MYSQL_DATABASE: testgitea
|
||||
|
||||
- name: pgsql
|
||||
pull: default
|
||||
image: postgres:9.5
|
||||
environment:
|
||||
POSTGRES_DB: test
|
||||
|
||||
- name: mssql
|
||||
pull: default
|
||||
image: microsoft/mssql-server-linux:latest
|
||||
image: mcr.microsoft.com/mssql/server:latest
|
||||
environment:
|
||||
ACCEPT_EULA: Y
|
||||
MSSQL_PID: Standard
|
||||
|
@ -54,52 +103,23 @@ steps:
|
|||
exclude:
|
||||
- pull_request
|
||||
|
||||
- name: pre-build
|
||||
pull: always
|
||||
image: node:10 # this step is kept at the lowest version of node that we support
|
||||
commands:
|
||||
- make css
|
||||
- make js
|
||||
|
||||
- name: build-without-gcc
|
||||
pull: always
|
||||
image: golang:1.11 # this step is kept as the lowest version of golang that we support
|
||||
environment:
|
||||
GO111MODULE: on
|
||||
GOPROXY: off
|
||||
commands:
|
||||
- curl -sL https://deb.nodesource.com/setup_12.x | bash - && apt -y install nodejs
|
||||
- go build -mod=vendor -o gitea_no_gcc # test if build succeeds without the sqlite tag
|
||||
|
||||
- name: build-linux-386
|
||||
pull: always
|
||||
image: golang:1.13
|
||||
environment:
|
||||
GO111MODULE: on
|
||||
GOPROXY: off
|
||||
GOOS: linux
|
||||
GOARCH: 386
|
||||
commands:
|
||||
- curl -sL https://deb.nodesource.com/setup_12.x | bash - && apt -y install nodejs
|
||||
- go build -mod=vendor -o gitea_linux_386 # test if compatible with 32 bit
|
||||
|
||||
- name: build
|
||||
pull: always
|
||||
image: golang:1.13
|
||||
commands:
|
||||
- curl -sL https://deb.nodesource.com/setup_12.x | bash - && apt -y install nodejs
|
||||
- make clean
|
||||
- make golangci-lint
|
||||
- make revive
|
||||
- make swagger-check
|
||||
- make swagger-validate
|
||||
- make test-vendor
|
||||
- make build
|
||||
environment:
|
||||
GOPROXY: https://goproxy.cn # proxy.golang.org is blocked in China, this proxy is not
|
||||
GOSUMDB: sum.golang.org
|
||||
TAGS: bindata sqlite sqlite_unlock_notify
|
||||
|
||||
- name: tag-pre-condition
|
||||
pull: always
|
||||
image: alpine/git
|
||||
commands:
|
||||
- git update-ref refs/heads/tag_test ${DRONE_COMMIT_SHA}
|
||||
|
||||
- name: unit-test
|
||||
pull: always
|
||||
image: golang:1.13
|
||||
|
@ -108,70 +128,8 @@ steps:
|
|||
environment:
|
||||
GOPROXY: off
|
||||
TAGS: bindata sqlite sqlite_unlock_notify
|
||||
depends_on:
|
||||
- build
|
||||
when:
|
||||
branch:
|
||||
- master
|
||||
event:
|
||||
- push
|
||||
- pull_request
|
||||
|
||||
- name: release-test
|
||||
pull: always
|
||||
image: golang:1.13
|
||||
commands:
|
||||
- make test
|
||||
environment:
|
||||
GOPROXY: off
|
||||
TAGS: bindata sqlite sqlite_unlock_notify
|
||||
depends_on:
|
||||
- build
|
||||
when:
|
||||
branch:
|
||||
- "release/*"
|
||||
event:
|
||||
- push
|
||||
- pull_request
|
||||
|
||||
- name: tag-pre-condition
|
||||
pull: always
|
||||
image: alpine/git
|
||||
commands:
|
||||
- git update-ref refs/heads/tag_test ${DRONE_COMMIT_SHA}
|
||||
depends_on:
|
||||
- build
|
||||
when:
|
||||
event:
|
||||
- tag
|
||||
|
||||
- name: tag-test
|
||||
pull: always
|
||||
image: golang:1.13
|
||||
commands:
|
||||
- make test
|
||||
environment:
|
||||
GOPROXY: off
|
||||
TAGS: bindata
|
||||
depends_on:
|
||||
- tag-pre-condition
|
||||
when:
|
||||
event:
|
||||
- tag
|
||||
|
||||
- name: test-sqlite
|
||||
pull: always
|
||||
image: golang:1.13
|
||||
commands:
|
||||
- "curl -s https://packagecloud.io/install/repositories/github/git-lfs/script.deb.sh | bash"
|
||||
- apt-get install -y git-lfs
|
||||
- timeout -s ABRT 20m make test-sqlite-migration
|
||||
- timeout -s ABRT 20m make test-sqlite
|
||||
environment:
|
||||
GOPROXY: off
|
||||
TAGS: bindata
|
||||
depends_on:
|
||||
- build
|
||||
GITHUB_READ_TOKEN:
|
||||
from_secret: github_read_token
|
||||
|
||||
- name: test-mysql
|
||||
pull: always
|
||||
|
@ -187,30 +145,6 @@ steps:
|
|||
TEST_LDAP: 1
|
||||
depends_on:
|
||||
- build
|
||||
when:
|
||||
branch:
|
||||
- master
|
||||
event:
|
||||
- push
|
||||
- pull_request
|
||||
|
||||
- name: tag-test-mysql
|
||||
pull: always
|
||||
image: golang:1.13
|
||||
commands:
|
||||
- "curl -s https://packagecloud.io/install/repositories/github/git-lfs/script.deb.sh | bash"
|
||||
- apt-get install -y git-lfs
|
||||
- timeout -s ABRT 20m make test-mysql-migration
|
||||
- timeout -s ABRT 20m make test-mysql
|
||||
environment:
|
||||
GOPROXY: off
|
||||
TAGS: bindata
|
||||
TEST_LDAP: 1
|
||||
depends_on:
|
||||
- build
|
||||
when:
|
||||
event:
|
||||
- tag
|
||||
|
||||
- name: test-mysql8
|
||||
pull: always
|
||||
|
@ -227,21 +161,6 @@ steps:
|
|||
depends_on:
|
||||
- build
|
||||
|
||||
- name: test-pgsql
|
||||
pull: always
|
||||
image: golang:1.13
|
||||
commands:
|
||||
- "curl -s https://packagecloud.io/install/repositories/github/git-lfs/script.deb.sh | bash"
|
||||
- apt-get install -y git-lfs
|
||||
- timeout -s ABRT 20m make test-pgsql-migration
|
||||
- timeout -s ABRT 20m make test-pgsql
|
||||
environment:
|
||||
GOPROXY: off
|
||||
TAGS: bindata
|
||||
TEST_LDAP: 1
|
||||
depends_on:
|
||||
- build
|
||||
|
||||
- name: test-mssql
|
||||
pull: always
|
||||
image: golang:1.13
|
||||
|
@ -293,13 +212,90 @@ steps:
|
|||
- push
|
||||
- pull_request
|
||||
|
||||
|
||||
|
||||
---
|
||||
kind: pipeline
|
||||
name: testing-arm64
|
||||
|
||||
platform:
|
||||
os: linux
|
||||
arch: arm64
|
||||
|
||||
workspace:
|
||||
base: /go
|
||||
path: src/code.gitea.io/gitea
|
||||
|
||||
services:
|
||||
- name: pgsql
|
||||
pull: default
|
||||
image: postgres:9.5
|
||||
environment:
|
||||
POSTGRES_DB: test
|
||||
POSTGRES_PASSWORD: postgres
|
||||
|
||||
- name: ldap
|
||||
pull: default
|
||||
image: gitea/test-openldap:latest
|
||||
|
||||
steps:
|
||||
- name: fetch-tags
|
||||
pull: default
|
||||
image: docker:git
|
||||
commands:
|
||||
- git fetch --tags --force
|
||||
when:
|
||||
event:
|
||||
exclude:
|
||||
- pull_request
|
||||
|
||||
- name: build
|
||||
pull: always
|
||||
image: golang:1.13
|
||||
commands:
|
||||
- curl -sL https://deb.nodesource.com/setup_12.x | bash - && apt -y install nodejs
|
||||
- make build
|
||||
environment:
|
||||
GOPROXY: https://goproxy.cn # proxy.golang.org is blocked in China, this proxy is not
|
||||
GOSUMDB: sum.golang.org
|
||||
TAGS: bindata sqlite sqlite_unlock_notify
|
||||
|
||||
- name: test-sqlite
|
||||
pull: always
|
||||
image: golang:1.13
|
||||
commands:
|
||||
- "curl -s https://packagecloud.io/install/repositories/github/git-lfs/script.deb.sh | bash"
|
||||
- apt-get install -y git-lfs
|
||||
- timeout -s ABRT 20m make test-sqlite-migration
|
||||
- timeout -s ABRT 20m make test-sqlite
|
||||
environment:
|
||||
GOPROXY: off
|
||||
TAGS: bindata
|
||||
depends_on:
|
||||
- build
|
||||
|
||||
- name: test-pgsql
|
||||
pull: always
|
||||
image: golang:1.13
|
||||
commands:
|
||||
- "curl -s https://packagecloud.io/install/repositories/github/git-lfs/script.deb.sh | bash"
|
||||
- apt-get install -y git-lfs
|
||||
- timeout -s ABRT 20m make test-pgsql-migration
|
||||
- timeout -s ABRT 20m make test-pgsql
|
||||
environment:
|
||||
GOPROXY: off
|
||||
TAGS: bindata
|
||||
TEST_LDAP: 1
|
||||
depends_on:
|
||||
- build
|
||||
|
||||
---
|
||||
kind: pipeline
|
||||
name: translations
|
||||
|
||||
platform:
|
||||
os: linux
|
||||
arch: amd64
|
||||
arch: arm64
|
||||
|
||||
workspace:
|
||||
base: /go
|
||||
|
@ -378,7 +374,8 @@ trigger:
|
|||
- push
|
||||
|
||||
depends_on:
|
||||
- testing
|
||||
- testing-amd64
|
||||
- testing-arm64
|
||||
- translations
|
||||
|
||||
steps:
|
||||
|
@ -390,7 +387,7 @@ steps:
|
|||
|
||||
- name: static
|
||||
pull: always
|
||||
image: techknowlogick/xgo:latest
|
||||
image: techknowlogick/xgo:go-1.13.x
|
||||
commands:
|
||||
- apt update && apt -y install curl
|
||||
- curl -sL https://deb.nodesource.com/setup_12.x | bash - && apt -y install nodejs
|
||||
|
@ -476,7 +473,8 @@ trigger:
|
|||
- tag
|
||||
|
||||
depends_on:
|
||||
- testing
|
||||
- testing-arm64
|
||||
- testing-amd64
|
||||
|
||||
steps:
|
||||
- name: fetch-tags
|
||||
|
@ -487,7 +485,7 @@ steps:
|
|||
|
||||
- name: static
|
||||
pull: always
|
||||
image: techknowlogick/xgo:latest
|
||||
image: techknowlogick/xgo:go-1.13.x
|
||||
commands:
|
||||
- apt update && apt -y install curl
|
||||
- curl -sL https://deb.nodesource.com/setup_12.x | bash - && apt -y install nodejs
|
||||
|
@ -545,17 +543,14 @@ name: docs
|
|||
|
||||
platform:
|
||||
os: linux
|
||||
arch: amd64
|
||||
|
||||
workspace:
|
||||
base: /go
|
||||
path: src/code.gitea.io/gitea
|
||||
arch: arm64
|
||||
|
||||
steps:
|
||||
- name: build-docs
|
||||
pull: always
|
||||
image: webhippie/hugo:latest
|
||||
image: plugins/hugo:latest
|
||||
commands:
|
||||
- apk add --no-cache make bash curl
|
||||
- cd docs
|
||||
- make trans-copy
|
||||
- make clean
|
||||
|
@ -563,7 +558,7 @@ steps:
|
|||
|
||||
- name: publish-docs
|
||||
pull: always
|
||||
image: lucap/drone-netlify:latest
|
||||
image: techknowlogick/drone-netlify:latest
|
||||
settings:
|
||||
path: docs/public/
|
||||
site_id: d2260bae-7861-4c02-8646-8f6440b12672
|
||||
|
@ -578,7 +573,7 @@ steps:
|
|||
|
||||
---
|
||||
kind: pipeline
|
||||
name: docker-linux-amd64
|
||||
name: docker-linux-amd64-release
|
||||
|
||||
platform:
|
||||
os: linux
|
||||
|
@ -589,13 +584,13 @@ workspace:
|
|||
path: src/code.gitea.io/gitea
|
||||
|
||||
depends_on:
|
||||
- testing
|
||||
- testing-amd64
|
||||
- testing-arm64
|
||||
|
||||
trigger:
|
||||
ref:
|
||||
- refs/heads/master
|
||||
- "refs/tags/**"
|
||||
- "refs/pull/**"
|
||||
|
||||
steps:
|
||||
- name: fetch-tags
|
||||
|
@ -603,23 +598,6 @@ steps:
|
|||
image: docker:git
|
||||
commands:
|
||||
- git fetch --tags --force
|
||||
when:
|
||||
event:
|
||||
exclude:
|
||||
- pull_request
|
||||
|
||||
- name: dryrun
|
||||
pull: always
|
||||
image: plugins/docker:linux-amd64
|
||||
settings:
|
||||
dry_run: true
|
||||
repo: gitea/gitea
|
||||
tags: linux-amd64
|
||||
build_args:
|
||||
- GOPROXY=off
|
||||
when:
|
||||
event:
|
||||
- pull_request
|
||||
|
||||
- name: publish
|
||||
pull: always
|
||||
|
@ -641,7 +619,7 @@ steps:
|
|||
|
||||
---
|
||||
kind: pipeline
|
||||
name: docker-linux-arm64
|
||||
name: docker-linux-arm64-dry-run
|
||||
|
||||
platform:
|
||||
os: linux
|
||||
|
@ -652,25 +630,13 @@ workspace:
|
|||
path: src/code.gitea.io/gitea
|
||||
|
||||
depends_on:
|
||||
- testing
|
||||
- compliance
|
||||
|
||||
trigger:
|
||||
ref:
|
||||
- refs/heads/master
|
||||
- "refs/tags/**"
|
||||
- "refs/pull/**"
|
||||
|
||||
steps:
|
||||
- name: fetch-tags
|
||||
pull: default
|
||||
image: docker:git
|
||||
commands:
|
||||
- git fetch --tags --force
|
||||
when:
|
||||
event:
|
||||
exclude:
|
||||
- pull_request
|
||||
|
||||
- name: dryrun
|
||||
pull: always
|
||||
image: plugins/docker:linux-arm64
|
||||
|
@ -684,6 +650,33 @@ steps:
|
|||
event:
|
||||
- pull_request
|
||||
|
||||
---
|
||||
kind: pipeline
|
||||
name: docker-linux-arm64-release
|
||||
|
||||
platform:
|
||||
os: linux
|
||||
arch: arm64
|
||||
|
||||
workspace:
|
||||
base: /go
|
||||
path: src/code.gitea.io/gitea
|
||||
|
||||
depends_on:
|
||||
- testing-amd64
|
||||
- testing-arm64
|
||||
|
||||
trigger:
|
||||
ref:
|
||||
- refs/heads/master
|
||||
- "refs/tags/**"
|
||||
steps:
|
||||
- name: fetch-tags
|
||||
pull: default
|
||||
image: docker:git
|
||||
commands:
|
||||
- git fetch --tags --force
|
||||
|
||||
- name: publish
|
||||
pull: always
|
||||
image: plugins/docker:linux-arm64
|
||||
|
@ -729,45 +722,49 @@ trigger:
|
|||
- "refs/tags/**"
|
||||
|
||||
depends_on:
|
||||
- docker-linux-amd64
|
||||
- docker-linux-arm64
|
||||
- docker-linux-amd64-release
|
||||
- docker-linux-arm64-release
|
||||
|
||||
---
|
||||
kind: pipeline
|
||||
name: notify
|
||||
name: notifications
|
||||
|
||||
platform:
|
||||
os: linux
|
||||
arch: amd64
|
||||
|
||||
workspace:
|
||||
base: /go
|
||||
path: src/code.gitea.io/gitea
|
||||
arch: arm64
|
||||
|
||||
clone:
|
||||
disable: true
|
||||
|
||||
when:
|
||||
trigger:
|
||||
branch:
|
||||
- master
|
||||
- "release/*"
|
||||
event:
|
||||
- push
|
||||
- tag
|
||||
status:
|
||||
- success
|
||||
- failure
|
||||
|
||||
depends_on:
|
||||
- testing
|
||||
- testing-amd64
|
||||
- testing-arm64
|
||||
- translations
|
||||
- release-version
|
||||
- release-master
|
||||
- docker-linux-amd64
|
||||
- docker-linux-arm64
|
||||
- docker-linux-amd64-release
|
||||
- docker-linux-arm64-release
|
||||
- docker-manifest
|
||||
- docs
|
||||
|
||||
steps:
|
||||
- name: discord
|
||||
pull: always
|
||||
image: appleboy/drone-discord:1.0.0
|
||||
environment:
|
||||
DISCORD_WEBHOOK_ID:
|
||||
image: appleboy/drone-discord:1.2.4
|
||||
settings:
|
||||
message: "{{#success build.status}} ✅ Build #{{build.number}} of `{{repo.name}}` succeeded.\n\n📝 Commit by {{commit.author}} on `{{commit.branch}}`:\n``` {{commit.message}} ```\n\n🌐 {{ build.link }} {{else}} ❌ Build #{{build.number}} of `{{repo.name}}` failed.\n\n📝 Commit by {{commit.author}} on `{{commit.branch}}`:\n``` {{commit.message}} ```\n\n🌐 {{ build.link }} {{/success}}\n"
|
||||
webhook_id:
|
||||
from_secret: discord_webhook_id
|
||||
DISCORD_WEBHOOK_TOKEN:
|
||||
webhook_token:
|
||||
from_secret: discord_webhook_token
|
||||
|
|
@@ -69,6 +69,7 @@ coverage.all
/yarn.lock
/public/js
/public/css
/VERSION

# Snapcraft
snap/.snapcraft/
274 CHANGELOG.md
@ -4,13 +4,165 @@ This changelog goes through all the changes that have been made in each release
|
|||
without substantial changes to our git log; to see the highlights of what has
|
||||
been added to each release, please refer to the [blog](https://blog.gitea.io).
|
||||
|
||||
## [1.11.0-RC1](https://github.com/go-gitea/gitea/releases/tag/v1.11.0-rc1) - 2020-01-07
|
||||
## [1.11.8](https://github.com/go-gitea/gitea/releases/tag/v1.11.8) - 2020-06-21
|
||||
|
||||
* BUGFIXES
|
||||
* Really fix __webpack_public_path__ for 1.11 (#11961)
|
||||
|
||||
## [1.11.7](https://github.com/go-gitea/gitea/releases/tag/v1.11.7) - 2020-06-18
|
||||
|
||||
* BUGFIXES
|
||||
* Use ID or Where to instead directly use Get when load object from database (#11925) (#11935)
|
||||
* Fix __webpack_public_path__ for 1.11 (#11907)
|
||||
* Fix verification of subkeys of default gpg key (#11713) (#11902)
|
||||
* Remove unnecessary parentheses in wiki/view template (#11781)
|
||||
* Doctor fix xorm.Count nil on sqlite error (#11741)
|
||||
|
||||
## [1.11.6](https://github.com/go-gitea/gitea/releases/tag/v1.11.6) - 2020-05-30
|
||||
|
||||
* SECURITY
|
||||
* Fix missing authorization check on pull for public repos of private/limited org (#11656) (#11683)
|
||||
* Use session for retrieving org teams (#11438) (#11439)
|
||||
* BUGFIXES
|
||||
* Return json on 500 error from API (#11574) (#11660)
|
||||
* Fix wrong milestone in webhook message (#11596) (#11612)
|
||||
* Prevent (caught) panic on login (#11590) (#11598)
|
||||
* Fix commit page js error (#11527)
|
||||
* Use media links for img in post-process (#10515) (#11504)
|
||||
* Ensure public repositories in private organizations are visible and fix admin organizations list (#11465) (#11475)
|
||||
* Set correct Content-Type value for Gogs/Gitea webhooks (#9504) (#10456) (#11461)
|
||||
* Allow all members of private orgs to see public repos (#11442) (#11459)
|
||||
* Whenever the ctx.Session is updated, release it to save it before sending the redirect (#11456) (#11457)
|
||||
* Forcibly clean and destroy the session on logout (#11447) (#11451)
|
||||
* Fix /api/v1/orgs/* endpoints by changing parameter to :org from :orgname (#11381)
|
||||
* Add tracked time fix to doctor (part of #11111) (#11138)
|
||||
* Fix webpack chunk loading with STATIC_URL_PREFIX (#11526) (#11544)
|
||||
* Remove unnecessary parentheses in wiki/revision.tmpl to allow 1.11 to build on go1.14 (#11481)
|
||||
|
||||
## [1.11.5](https://github.com/go-gitea/gitea/releases/tag/v1.11.5) - 2020-05-09
|
||||
|
||||
* BUGFIXES
|
||||
* Prevent timer leaks in Workerpool and others (#11333) (#11340)
|
||||
* Fix tracked time issues (#11349) (#11354)
|
||||
* Add NotifySyncPushCommits to indexer notifier (#11309) (#11338)
|
||||
* Allow X in addition to x in tasks (#10979) (#11335)
|
||||
* When delete tracked time through the API return 404 not 500 (#11319) (#11326)
|
||||
* Prevent duplicate records in organizations list when creating a repository (#11303) (#11325)
|
||||
* Manage port in submodule refurl (#11305) (#11323)
|
||||
* api.Context.NotFound(...) should tolerate nil (#11288) (#11306)
|
||||
* Show pull request selection even when unrelated branches (#11239) (#11283)
|
||||
* Repo: milestone: make /milestone/:id endpoint accessible (#11264) (#11282)
|
||||
* Fix GetContents(): Dont't ignore Executables (#11192) (#11209)
|
||||
* Fix submodule paths when AppSubUrl is not root (#11098) (#11176)
|
||||
* Prevent clones and pushes to disabled wiki (#11131) (#11134)
|
||||
* Remove errant third closing curly-bracket from account.tmpl and send account ID in account.tmpl (#11130)
|
||||
* On Repo Deletion: Delete related TrackedTimes too (#11110) (#11125)
|
||||
* Refresh codemirror on show pull comment tab (#11100) (#11122)
|
||||
* Fix merge dialog on protected branch with missing required statuses (#11074) (#11084)
|
||||
* Load pr Issue Poster on API too (#11033) (#11039)
|
||||
* Fix release counter on API repository info (#10968) (#10996)
|
||||
* Generate Diff and Patch direct from Pull head (#10936) (#10938)
|
||||
* Fix rebase conflict detection in git 2.26 (#10929) (#10930)
|
||||
* ENHANCEMENT
|
||||
* Fix 404 and 500 image size in small size screen (#11043) (#11049)
|
||||
* Multiple Gitea Doctor improvements (#10943) (#10990) (#10064) (#9095) (#10991)
|
||||
|
||||
## [1.11.4](https://github.com/go-gitea/gitea/releases/tag/v1.11.4) - 2020-04-01
|
||||
|
||||
* BUGFIXES
|
||||
* Only update merge_base if not already merged (#10909)
|
||||
* Fix milestones too many SQL variables bug (#10880) (#10904)
|
||||
* Protect against NPEs in notifications list (#10879) (#10883)
|
||||
* Convert plumbing.ErrObjectNotFound to git.ErrNotExist in getCommit (#10862) (#10868)
|
||||
* Convert plumbing.ErrReferenceNotFound to git.ErrNotExist in GetRefCommitID (#10676) (#10797)
|
||||
* Account for empty lines in receive-hook message (#10773) (#10784)
|
||||
* Fix bug on branch API (#10767) (#10775)
|
||||
* Migrate to go-git/go-git v5.0.0 (#10735) (#10753)
|
||||
* Fix hiding of fields in authorization source page (#10734) (#10752)
|
||||
* Prevent default for linkAction (#10742) (#10743)
|
||||
|
||||
## [1.11.3](https://github.com/go-gitea/gitea/releases/tag/v1.11.3) - 2020-03-10
|
||||
|
||||
* BUGFIXES
|
||||
* Prevent panic in stopwatch (#10670) (#10673)
|
||||
* Fix bug on pull view when required status check no ci result (#10648) (#10651)
|
||||
* Build explicitly with Go 1.13 (#10684)
|
||||
|
||||
## [1.11.2](https://github.com/go-gitea/gitea/releases/tag/v1.11.2) - 2020-03-06
|
||||
|
||||
* BREAKING
|
||||
* Various fixes in login sources (#10428) (#10429)
|
||||
* SECURITY
|
||||
* Ensure only own addresses are updated (#10397) (#10399)
|
||||
* Logout POST action (#10582) (#10585)
|
||||
* Org action fixes and form cleanup (#10512) (#10514)
|
||||
* Change action GETs to POST (#10462) (#10464)
|
||||
* Fix admin notices (#10480) (#10483)
|
||||
* Change admin dashboard to POST (#10465) (#10466)
|
||||
* Update markbates/goth (#10444) (#10445)
|
||||
* Update crypto vendors (#10385) (#10398)
|
||||
* BUGFIXES
|
||||
* Allow users with write permissions to modify issue descriptions and comments. (#10623) (#10626)
|
||||
* Handle deleted base branch in PR (#10618) (#10619)
|
||||
* Delete dependencies when deleting a repository (#10608) (#10616)
|
||||
* Ensure executable bit is kept on the web editor (#10607) (#10614)
|
||||
* Update mergebase in pr checker (#10586) (#10605)
|
||||
* Fix release attachments being deleted while upgrading (#10572) (#10573)
|
||||
* Fix redirection path if Slack webhook channel is invalid (#10566)
|
||||
* Fix head.tmpl og:image picture location (#10531) (#10556)
|
||||
* Fix 404 after activating secondary email (#10547) (#10553)
|
||||
* Show Signer in commit lists and add basic trust (#10425 & #10511) (#10524)
|
||||
* Fix potential bugs (#10513) (#10518)
|
||||
* Use \[:space:\] instead of \\s (#10508) (#10509)
|
||||
* Avoid mailing users that have explicitly unwatched an issue (#10475) (#10500)
|
||||
* Handle push rejection message in Merge & Web Editor (#10373) (#10497)
|
||||
* Fix SQLite concurrency problems by using BEGIN IMMEDIATE (#10368) (#10493)
|
||||
* Fix double PR notification from API (#10482) (#10486)
|
||||
* Show the username as a fallback on feeds if full name is blank (#10461)
|
||||
* Trigger webhooks on issue label-change via API too (#10421) (#10439)
|
||||
* Fix git reference type in webhooks (#10427) (#10432)
|
||||
* Prevent panic on merge to PR (#10403) (#10408)
|
||||
* Fix wrong num closed issues on repository when close issue via commit… (#10364) (#10380)
|
||||
* Reading pull attachments should depend on read UnitTypePullRequests (#10346) (#10354)
|
||||
* Set max-width on review-box comment box (#10348) (#10353)
|
||||
* Prevent nil pointer in GetPullRequestCommitStatusState (#10342) (#10344)
|
||||
* Fix protected branch status check settings (#10341) (#10343)
|
||||
* Truncate long commit message header (#10301) (#10319)
|
||||
* Set the initial commit status to Success otherwise it will always be Pending (#10317) (#10318)
|
||||
* Don't manually replace whitespace during render (#10291) (#10315)
|
||||
* ENHANCEMENT
|
||||
* Admin page for managing user e-mail activation (#10557) (#10579)
|
||||
|
||||
## [1.11.1](https://github.com/go-gitea/gitea/releases/tag/v1.11.1) - 2020-02-15
|
||||
|
||||
* BUGFIXES
|
||||
* Repo name added to automatically generated commit message when merging (#9997) (#10285)
|
||||
* Fix Workerpool deadlock (#10283) (#10284)
|
||||
* Divide GetIssueStats query in smaller chunks (#10176) (#10282)
|
||||
* Fix reply on code review (#10257)
|
||||
* Stop hanging issue indexer initialisation from preventing shutdown (#10243) (#10249)
|
||||
* Fix filter label emoji width (#10241) (#10244)
|
||||
* Fix issue sidebar menus having an infinite height (#10239) (#10240)
|
||||
* Fix commit between two commits calculation if there is only last commit (#10225) (#10226)
|
||||
* Only check for conflicts/merging if the PR has not been merged in the interim (#10132) (#10206)
|
||||
* Blacklist manifest.json & milestones user (#10292) (#10293)
|
||||
|
||||
## [1.11.0](https://github.com/go-gitea/gitea/releases/tag/v1.11.0) - 2020-02-10
|
||||
* BREAKING
|
||||
* Fix followers and following tabs in profile (#10202) (#10203)
|
||||
* Make CertFile and KeyFile relative to CustomPath (#9868) (#9874)
|
||||
* Remove unused endpoints (#9538)
|
||||
* Prefix all user-generated IDs in markup (#9477)
|
||||
* Enforce Gitea environment for pushes (#8982)
|
||||
* Hide some user information via API if user have no enough permission (#8655)
|
||||
* Hide some user information via API if user have not enough permissions (#8655)
|
||||
* Move startpage/homepage translation to crowdin (#8596)
|
||||
* SECURITY
|
||||
* Never allow an empty password to validate (#9682) (#9683)
|
||||
* Prevent redirect to Host (#9678) (#9679)
|
||||
* Swagger hide search field (#9554)
|
||||
* Add "search" to reserved usernames (#9063)
|
||||
* Switch to fomantic-ui (#9374)
|
||||
* Only serve attachments when linked to issue/release and if accessible by user (#9340)
|
||||
* FEATURES
|
||||
* Webhooks should only show sender if it makes sense (#9601)
|
||||
* Provide Default messages for merges (#9393)
|
||||
|
@ -44,6 +196,68 @@ been added to each release, please refer to the [blog](https://blog.gitea.io).
|
|||
* Sign merges, CRUD, Wiki and Repository initialisation with gpg key (#7631)
|
||||
* Add basic repository lfs management (#7199)
|
||||
* BUGFIXES
|
||||
* Fix code-expansion arc-green theme bug (#10180) (#10185)
|
||||
* Prevent double wait-group decrement (#10170) (#10175)
|
||||
* Allow emoji on review head comments (#10159) (#10174)
|
||||
* Fix issue/pull link (#10158) (#10173)
|
||||
* Fix push-create SSH bugs (#10145) (#10151)
|
||||
* Prevent DeleteUser API abuse (#10125) (#10128)
|
||||
* Fix issues/pulls dashboard paging error (#10114) (#10115)
|
||||
* Add button to revert SimpleMDE to plain textarea (#10099) (#10102)
|
||||
* Fix branch page pull request title and link error (#10092) (#10097)
|
||||
* Fix PR API: Only try to get HeadBranch if HeadRepo exist (#10029) (#10088)
|
||||
* Update topics repo count when deleting repository (#10051) (#10081)
|
||||
* Show pull icon on pull requests (#10061) (#10062)
|
||||
* Fix milestone API state parameter unhandled (#10049) (#10052)
|
||||
* Move to using a temporary repo for pushing new PRs (#10009) (#10042)
|
||||
* Fix wiki raw view on sub path (#10002) (#10040)
|
||||
* Ensure that feeds are appropriately restricted (#10018) (#10019)
|
||||
* Sanitize credentials in mirror form (#9975) (#9991)
|
||||
* Close related pull requests when deleting head repository or head branch (#9927) (#9974)
|
||||
* Switch to use -f instead of -F for sendmail (#9961) (#9970)
|
||||
* Fix file rename/copy not supported by indexer (#9965) (#9967)
|
||||
* Fix repo indexer not updating upon push (#9957) (#9963)
|
||||
* Don't convert ellipsis in markdown (#9905) (#9937)
|
||||
* Fixed repo link in generated comment for cross repository dependency (#9863) (#9935)
|
||||
* Check if diff actually contains sections when rendering (#9926) (#9933)
|
||||
* Fix wrong hint when status checking is running on pull request view (#9886) (#9928)
|
||||
* Fix RocketChat (#9908) (#9921)
|
||||
* Do not try to recreate ldap user if they are already created (#9900) (#9919)
|
||||
* Create terminated channel in queue_redis (#9910) (#9911)
|
||||
* Prevent empty LDAP search result from deactivating all users (#9879) (#9896)
|
||||
* Fix wrong permissions check when issues/prs shared operations (#9885) (#9889)
|
||||
* Check user != nil before checking values (#9881) (#9883)
|
||||
* Allow hyphen in language name (#9873) (#9880)
|
||||
* Ensure that 2fa is checked on reset-password (#9857) (#9876)
|
||||
* Fix issues/pulls dependencies problems (#9842) (#9864)
|
||||
* Fix markdown anchor links (#9673) (#9840)
|
||||
* Allow assignee on Pull Creation when Issue Unit is deactivated (#9836) (#9837)
|
||||
* Fix download file wrong content-type (#9825) (#9834)
|
||||
* Fix wrong poster identity on a migrated pull request when submit review (#9827) (#9830)
|
||||
* Fix database dump when log directory is missing (#9818) (#9819)
|
||||
* Fix compare (#9808) (#9814)
|
||||
* Fix push-to-create (#9772) (#9797)
|
||||
* Fix missing msteam webhook on organization (#9781) (#9794)
|
||||
* Fix missing unlock in uniquequeue (#9790) (#9791)
|
||||
* Fix add team on collaborator page when same name as organization (#9778)
|
||||
* DeleteRepoFile incorrectly handles Delete to new branch (#9769) (#9775)
|
||||
* Fix milestones page (#9771)
|
||||
* Fix SimpleMDE quote reply (#9757) (#9768)
|
||||
* Fix missing updated time on migrated issues and comments (#9744) (#9764)
|
||||
* Move Errored PRs out of StatusChecking (#9675) (#9726)
|
||||
* Make hook status printing configurable with delay (#9641) (#9725)
|
||||
* Fix /repos/issues/search (#9698) (#9724)
|
||||
* Silence fomantic error regarding tabs (#9713) (#9718)
|
||||
* Remove unused lock (#9709) (#9710)
|
||||
* Remove q.lock.Unlock() in setInternal to prevent panic (#9705) (#9706)
|
||||
* Load milestone in API PR list (#9671) (#9700)
|
||||
* Don't attempt to close issue if already closed (#9696) (#9699)
|
||||
* Remove google font call (#9668) (#9681)
|
||||
* Eliminate horizontal scroll caused by footer (#9674)
|
||||
* Fix nil reference in repo generation (#9660) (#9666)
|
||||
* Add HTML URL to API Issues (#9654) (#9661)
|
||||
* Add PR review webhook to Telegram (#9653) (#9655)
|
||||
* Use filepath.IsAbs instead of path.IsAbs (#9651) (#9652)
|
||||
* Disable remove button on repository teams when have access to all (#9640)
|
||||
* Clean up old references on branch delete (#9614)
|
||||
* Hide public repos owned by private orgs (#9609)
|
||||
|
@ -175,6 +389,7 @@ been added to each release, please refer to the [blog](https://blog.gitea.io).
|
|||
* Fix migrate mirror 500 bug (#8526)
|
||||
* Fix password complexity regex for special characters (on master) (#8525)
|
||||
* ENHANCEMENTS
|
||||
* Explicitly refer to PR in squash-merge commit message in case of external tracker (#9844) (#9855)
|
||||
* Add a /user/login landing page option (#9622)
|
||||
* Some more e-mail notification fixes (#9596)
|
||||
* Add branch protection option to block merge on requested changes. (#9592)
|
||||
|
@ -291,12 +506,6 @@ been added to each release, please refer to the [blog](https://blog.gitea.io).
|
|||
* wiki - add 'write' 'preview' buttons to wiki edit like in issues (#7241)
|
||||
* Change target branch for pull request (#6488)
|
||||
* Display PR commits and diffs using base repo rather than forked (#3648)
|
||||
* SECURITY
|
||||
* Swagger hide search field (#9554)
|
||||
* Add "search" to reserved usernames (#9063)
|
||||
* Switch to fomantic-ui (#9374)
|
||||
* Only serve attachments when linked to issue/release and if accessible by user (#9340)
|
||||
* Hide credentials when submitting migration through API (#9102)
|
||||
* TESTING
|
||||
* Add debug option to serv to help debug problems (#9492)
|
||||
* Fix the intermittent TestGPGGit failures (#9360)
|
||||
|
@ -310,10 +519,12 @@ been added to each release, please refer to the [blog](https://blog.gitea.io).
|
|||
* Update Github Migration Tests (#8893) (#8938)
|
||||
* Update heatmap fixtures to restore tests (#8615)
|
||||
* TRANSLATION
|
||||
* Fix Korean locales (#9761) (#9780)
|
||||
* Fix placeholders in the error message (#9060)
|
||||
* Fix spelling of admin.users.max_repo_creation (#8934)
|
||||
* Improve german translation of homepage (#8549)
|
||||
* BUILD
|
||||
* Fix webpack polyfills (#9735) (#9738)
|
||||
* Update gitea.com/macaron to 1.4.0 (#9608)
|
||||
* Upgrade lato fonts to v16. (#9498)
|
||||
* Update alpine to 3.11 (#9440)
|
||||
|
@ -344,6 +555,7 @@ been added to each release, please refer to the [blog](https://blog.gitea.io).
|
|||
* Update the provided gitea.service to mention socket activation (#8531)
|
||||
* Doc added how to setup email (#8520)
|
||||
* MISC
|
||||
* Backport Locales [2020-01-14] (#9773)
|
||||
* Add translatable Powered by Gitea text in footer (#9600)
|
||||
* Add contrib/environment-to-ini (#9519)
|
||||
* Remove unnecessary loading of settings in update hook (#9496)
|
||||
|
@ -384,6 +596,48 @@ been added to each release, please refer to the [blog](https://blog.gitea.io).
|
|||
* Update CodeMirror to version 5.49.0 (#8381)
|
||||
* Wiki editor: enable side-by-side button (#7242)
|
||||
|
||||
## [1.10.6](https://github.com/go-gitea/gitea/releases/tag/v1.10.6) - 2020-03-10
|
||||
|
||||
This is a re-tag version of v1.10.5 and also explicitly built with Go 1.13.
|
||||
|
||||
WARNING: v1.10.5 is incorrectly tagged targeting 1.12-dev and should **not** be used.
|
||||
|
||||
## [1.10.5](https://github.com/go-gitea/gitea/releases/tag/v1.10.5) - 2020-03-06
|
||||
|
||||
* BUGFIXES
|
||||
* Fix release attachments being deleted while upgrading (#10572) (#10574)
|
||||
|
||||
## [1.10.4](https://github.com/go-gitea/gitea/releases/tag/v1.10.4) - 2020-02-16
|
||||
|
||||
* FEATURE
|
||||
* Prevent empty LDAP search from deactivating all users (#9879) (#9890)
|
||||
* BUGFIXES
|
||||
* Fix reply on code review (#10261) (#10227)
|
||||
* Fix branch page pull request title and link error (#10092) (#10098)
|
||||
* Fix milestone API state parameter unhandled (#10049) (#10053)
|
||||
* Fix wiki raw view on sub path (#10002) (#10041)
|
||||
* Fix RocketChat Webhook (#9908) (#9921) (#9925)
|
||||
* Fix bug about wrong dependencies permissions check and other wrong permissions check (#9884) (Partial backport #9842)
|
||||
* Ensure that 2fa is checked on reset-password (#9857) (#9877)
|
||||
|
||||
## [1.10.3](https://github.com/go-gitea/gitea/releases/tag/v1.10.3) - 2020-01-17
|
||||
* SECURITY
|
||||
* Hide credentials when submitting migration (#9102) (#9704)
|
||||
* Never allow an empty password to validate (#9682) (#9684)
|
||||
* Prevent redirect to Host (#9678) (#9680)
|
||||
* Hide public repos owned by private orgs (#9609) (#9616)
|
||||
* BUGFIXES
|
||||
* Allow assignee on Pull Creation when Issue Unit is deactivated (#9836) (#9838)
|
||||
* Fix download file wrong content-type (#9825) (#9835)
|
||||
* Fix wrong identify poster on a migrated pull request when submit review (#9827) (#9831)
|
||||
* Fix dump non-exist log directory (#9818) (#9820)
|
||||
* Fix compare (#9808) (#9815)
|
||||
* Fix missing msteam webhook on organization (#9781) (#9795)
|
||||
* Fix add team on collaborator page when same name as organization (#9783)
|
||||
* Fix cache problem on dashboard (#9358) (#9703)
|
||||
* Send tag create and push webhook when release created on UI (#8671) (#9702)
|
||||
* Branches not at ref commit ID should not be listed as Merged (#9614) (#9639)
|
||||
|
||||
## [1.10.2](https://github.com/go-gitea/gitea/releases/tag/v1.10.2) - 2020-01-02
|
||||
* BUGFIXES
|
||||
* Allow only specific Columns to be updated on Issue via API (#9539) (#9580)
|
||||
|
@ -1483,13 +1737,13 @@ been added to each release, please refer to the [blog](https://blog.gitea.io).
|
|||
* BUGFIXES
|
||||
* Allow resend of confirmation email when logged in (#6482) (#6487)
|
||||
|
||||
## [1.7.5](https://github.com/go-gitea/gitea/releases/tag/v1.7.5) - 2019-03-27
|
||||
## [1.7.5](https://github.com/go-gitea/gitea/releases/tag/v1.7.5) - 2019-03-27
|
||||
* BUGFIXES
|
||||
* Fix unitTypeCode not being used in accessLevelUnit (#6419) (#6423)
|
||||
* Fix bug where manifest.json was being requested without cookies and continuously creating new sessions (#6372) (#6383)
|
||||
* Fix ParsePatch function to work with quoted diff --git strings (#6323) (#6332)
|
||||
|
||||
## [1.7.4](https://github.com/go-gitea/gitea/releases/tag/v1.7.4) - 2019-03-12
|
||||
## [1.7.4](https://github.com/go-gitea/gitea/releases/tag/v1.7.4) - 2019-03-12
|
||||
* SECURITY
|
||||
* Fix potential XSS vulnerability in repository description. (#6306) (#6308)
|
||||
* BUGFIXES
|
||||
|
|
65 Makefile
@@ -29,6 +29,8 @@ EXTRA_GOFLAGS ?=

MAKE_VERSION := $(shell $(MAKE) -v | head -n 1)

STORED_VERSION_FILE := VERSION

ifneq ($(DRONE_TAG),)
	VERSION ?= $(subst v,,$(DRONE_TAG))
	GITEA_VERSION ?= $(VERSION)

@@ -38,7 +40,13 @@ else
	else
		VERSION ?= master
	endif
	GITEA_VERSION ?= $(shell git describe --tags --always | sed 's/-/+/' | sed 's/^v//')

	STORED_VERSION=$(shell cat $(STORED_VERSION_FILE) 2>/dev/null)
	ifneq ($(STORED_VERSION),)
		GITEA_VERSION ?= $(STORED_VERSION)
	else
		GITEA_VERSION ?= $(shell git describe --tags --always | sed 's/-/+/' | sed 's/^v//')
	endif
endif

LDFLAGS := $(LDFLAGS) -X "main.MakeVersion=$(MAKE_VERSION)" -X "main.Version=$(GITEA_VERSION)" -X "main.Tags=$(TAGS)"

@@ -96,13 +104,15 @@ include docker/Makefile
help:
	@echo "Make Routines:"
	@echo " - \"\" equivalent to \"build\""
	@echo " - build creates the entire project"
	@echo " - clean delete integration files and build files but not css and js files"
	@echo " - clean-all delete all generated files (integration test, build, css and js files)"
	@echo " - build build everything"
	@echo " - frontend build frontend files"
	@echo " - backend build backend files"
	@echo " - clean delete backend and integration files"
	@echo " - clean-all delete backend, frontend and integration files"
	@echo " - css rebuild only css files"
	@echo " - js rebuild only js files"
	@echo " - generate run \"make css js\" and \"go generate\""
	@echo " - fmt format the code"
	@echo " - generate run \"go generate\""
	@echo " - fmt format the Go code"
	@echo " - generate-swagger generate the swagger spec from code comments"
	@echo " - swagger-validate check if the swagger spec is valide"
	@echo " - revive run code linter revive"

@@ -113,12 +123,19 @@ help:

.PHONY: go-check
go-check:
	$(eval GO_VERSION := $(shell printf "%03d%03d%03d" $(shell go version | grep -Eo '[0-9]+\.?[0-9]+?\.?[0-9]?\s' | tr '.' ' ');))
	$(eval GO_VERSION := $(shell printf "%03d%03d%03d" $(shell go version | grep -Eo '[0-9]+\.?[0-9]+?\.?[0-9]?[[:space:]]' | tr '.' ' ');))
	@if [ "$(GO_VERSION)" -lt "001011000" ]; then \
		echo "Gitea requires Go 1.11.0 or greater to build. You can get it at https://golang.org/dl/"; \
		exit 1; \
	fi

.PHONY: git-check
git-check:
	@if git lfs >/dev/null 2>&1 ; then : ; else \
		echo "Gitea requires git with lfs support to run tests." ; \
		exit 1; \
	fi

.PHONY: node-check
node-check:
	$(eval NODE_VERSION := $(shell printf "%03d%03d%03d" $(shell node -v | grep -Eo '[0-9]+\.?[0-9]+?\.?[0-9]?' | tr '.' ' ');))

@@ -149,10 +166,6 @@ fmt:
vet:
	$(GO) vet $(PACKAGES)

.PHONY: generate
generate: js css
	GO111MODULE=on $(GO) generate -mod=vendor $(PACKAGES)

.PHONY: generate-swagger
generate-swagger:
	$(SWAGGER) generate spec -o './$(SWAGGER_SPEC)'

@@ -233,7 +246,7 @@ coverage:

.PHONY: unit-test-coverage
unit-test-coverage:
	$(GO) test -tags='sqlite sqlite_unlock_notify' -cover -coverprofile coverage.out $(PACKAGES) && echo "\n==>\033[32m Ok\033[m\n" || exit 1
	GO111MODULE=on $(GO) test -mod=vendor -tags='sqlite sqlite_unlock_notify' -cover -coverprofile coverage.out $(PACKAGES) && echo "\n==>\033[32m Ok\033[m\n" || exit 1

.PHONY: vendor
vendor:

@@ -376,7 +389,7 @@ integrations.mssql.test: $(GO_SOURCES)
integrations.sqlite.test: $(GO_SOURCES)
	GO111MODULE=on $(GO) test -mod=vendor -c code.gitea.io/gitea/integrations -o integrations.sqlite.test -tags 'sqlite sqlite_unlock_notify'

integrations.cover.test: $(GO_SOURCES)
integrations.cover.test: git-check $(GO_SOURCES)
	GO111MODULE=on $(GO) test -mod=vendor -c code.gitea.io/gitea/integrations -coverpkg $(shell echo $(PACKAGES) | tr ' ' ',') -o integrations.cover.test

.PHONY: migrations.mysql.test

@@ -407,13 +420,23 @@ install: $(wildcard *.go)
	$(GO) install -v -tags '$(TAGS)' -ldflags '-s -w $(LDFLAGS)'

.PHONY: build
build: go-check generate $(EXECUTABLE)
build: frontend backend

.PHONY: frontend
frontend: node-check js css

.PHONY: backend
backend: go-check generate $(EXECUTABLE)

.PHONY: generate
generate:
	GO111MODULE=on $(GO) generate -mod=vendor $(PACKAGES)

$(EXECUTABLE): $(GO_SOURCES)
	GO111MODULE=on $(GO) build -mod=vendor $(GOFLAGS) $(EXTRA_GOFLAGS) -tags '$(TAGS)' -ldflags '-s -w $(LDFLAGS)' -o $@

.PHONY: release
release: generate release-dirs release-windows release-linux release-darwin release-copy release-compress release-check
release: frontend generate release-dirs release-windows release-linux release-darwin release-copy release-compress release-sources release-check

.PHONY: release-dirs
release-dirs:

@@ -424,7 +447,7 @@ release-windows:
	@hash xgo > /dev/null 2>&1; if [ $$? -ne 0 ]; then \
		$(GO) get -u src.techknowlogick.com/xgo; \
	fi
	xgo -dest $(DIST)/binaries -tags 'netgo osusergo $(TAGS)' -ldflags '-linkmode external -extldflags "-static" $(LDFLAGS)' -targets 'windows/*' -out gitea-$(VERSION) .
	xgo -go go-1.13 -dest $(DIST)/binaries -tags 'netgo osusergo $(TAGS)' -ldflags '-linkmode external -extldflags "-static" $(LDFLAGS)' -targets 'windows/*' -out gitea-$(VERSION) .
ifeq ($(CI),drone)
	cp /build/* $(DIST)/binaries
endif

@@ -434,7 +457,7 @@ release-linux:
	@hash xgo > /dev/null 2>&1; if [ $$? -ne 0 ]; then \
		$(GO) get -u src.techknowlogick.com/xgo; \
	fi
	xgo -dest $(DIST)/binaries -tags 'netgo osusergo $(TAGS)' -ldflags '-linkmode external -extldflags "-static" $(LDFLAGS)' -targets 'linux/amd64,linux/386,linux/arm-5,linux/arm-6,linux/arm64,linux/mips64le,linux/mips,linux/mipsle' -out gitea-$(VERSION) .
	xgo -go go-1.13 -dest $(DIST)/binaries -tags 'netgo osusergo $(TAGS)' -ldflags '-linkmode external -extldflags "-static" $(LDFLAGS)' -targets 'linux/amd64,linux/386,linux/arm-5,linux/arm-6,linux/arm64,linux/mips64le,linux/mips,linux/mipsle' -out gitea-$(VERSION) .
ifeq ($(CI),drone)
	cp /build/* $(DIST)/binaries
endif

@@ -444,7 +467,7 @@ release-darwin:
	@hash xgo > /dev/null 2>&1; if [ $$? -ne 0 ]; then \
		$(GO) get -u src.techknowlogick.com/xgo; \
	fi
	xgo -dest $(DIST)/binaries -tags 'netgo osusergo $(TAGS)' -ldflags '$(LDFLAGS)' -targets 'darwin/*' -out gitea-$(VERSION) .
	xgo -go go-1.13 -dest $(DIST)/binaries -tags 'netgo osusergo $(TAGS)' -ldflags '$(LDFLAGS)' -targets 'darwin/*' -out gitea-$(VERSION) .
ifeq ($(CI),drone)
	cp /build/* $(DIST)/binaries
endif

@@ -464,6 +487,12 @@ release-compress:
	fi
	cd $(DIST)/release/; for file in `find . -type f -name "*"`; do echo "compressing $${file}" && gxz -k -9 $${file}; done;

.PHONY: release-sources
release-sources: | node_modules
	echo $(VERSION) > $(STORED_VERSION_FILE)
	tar --exclude=./$(DIST) --exclude=./.git --exclude=./node_modules/.cache -czf $(DIST)/release/gitea-src-$(VERSION).tar.gz .
	rm -f $(STORED_VERSION_FILE)

node_modules: package-lock.json
	npm install --no-save
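To illustrate the new `release-sources` target and the `STORED_VERSION_FILE` logic above: when building from such a source tarball (no `.git` directory), the Makefile now reads the version from the bundled VERSION file instead of `git describe`. A minimal sketch, with placeholder file name and version number:

    mkdir gitea-src && tar -xzf gitea-src-1.11.0.tar.gz -C gitea-src && cd gitea-src
    cat VERSION                  # written by `make release-sources`, e.g. 1.11.0
    TAGS="bindata" make build    # GITEA_VERSION falls back to the stored VERSION file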
@@ -33,6 +33,15 @@ From the root of the source tree, run:

    TAGS="bindata" make build

The `build` target is split into two sub-targets:

- `make backend` which requires [Go 1.11](https://golang.org/dl/) or greater.
- `make frontend` which requires [Node.js 10.0.0](https://nodejs.org/en/download/) or greater.

If pre-built frontend files are present it is possible to only build the backend:

    TAGS="bindata" make backend

More info: https://docs.gitea.io/en-us/install-from-source/

## Using
@@ -61,6 +61,10 @@ var (
			Name:  "admin-filter",
			Usage: "An LDAP filter specifying if a user should be given administrator privileges.",
		},
		cli.BoolFlag{
			Name:  "allow-deactivate-all",
			Usage: "Allow empty search results to deactivate all users.",
		},
		cli.StringFlag{
			Name:  "username-attribute",
			Usage: "The attribute of the user’s LDAP record containing the user name.",

@@ -231,6 +235,9 @@ func parseLdapConfig(c *cli.Context, config *models.LDAPConfig) error {
	if c.IsSet("admin-filter") {
		config.Source.AdminFilter = c.String("admin-filter")
	}
	if c.IsSet("allow-deactivate-all") {
		config.Source.AllowDeactivateAll = c.Bool("allow-deactivate-all")
	}
	return nil
}
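For reference, the new flag would be passed to the existing LDAP admin sub-commands, roughly like this (a sketch; the `update-ldap` sub-command and `--id` value are assumed from Gitea's admin CLI, only `--allow-deactivate-all` comes from this change):

    gitea admin auth update-ldap --id 1 --allow-deactivate-all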
@@ -0,0 +1,529 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.

package cmd

import (
	"bufio"
	"bytes"
	"context"
	"fmt"
	"io/ioutil"
	golog "log"
	"os"
	"os/exec"
	"path/filepath"
	"strings"
	"text/tabwriter"

	"code.gitea.io/gitea/models"
	"code.gitea.io/gitea/models/migrations"
	"code.gitea.io/gitea/modules/git"
	"code.gitea.io/gitea/modules/log"
	"code.gitea.io/gitea/modules/options"
	"code.gitea.io/gitea/modules/setting"
	"xorm.io/builder"

	"github.com/urfave/cli"
)

// CmdDoctor represents the available doctor sub-command.
var CmdDoctor = cli.Command{
	Name:        "doctor",
	Usage:       "Diagnose problems",
	Description: "A command to diagnose problems with the current Gitea instance according to the given configuration.",
	Action:      runDoctor,
	Flags: []cli.Flag{
		cli.BoolFlag{
			Name:  "list",
			Usage: "List the available checks",
		},
		cli.BoolFlag{
			Name:  "default",
			Usage: "Run the default checks (if neither --run or --all is set, this is the default behaviour)",
		},
		cli.StringSliceFlag{
			Name:  "run",
			Usage: "Run the provided checks - (if --default is set, the default checks will also run)",
		},
		cli.BoolFlag{
			Name:  "all",
			Usage: "Run all the available checks",
		},
		cli.BoolFlag{
			Name:  "fix",
			Usage: "Automatically fix what we can",
		},
		cli.StringFlag{
			Name:  "log-file",
			Usage: `Name of the log file (default: "doctor.log"). Set to "-" to output to stdout, set to "" to disable`,
		},
	},
}
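For reference, typical invocations of this new sub-command, based on the flags above and the check names in the checklist below, might look like the following (a sketch, not exhaustive):

    gitea doctor --list                          # show all available checks
    gitea doctor                                 # run only the default checks
    gitea doctor --run hooks --run paths --fix --log-file -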
|
||||
|
||||
type check struct {
|
||||
title string
|
||||
name string
|
||||
isDefault bool
|
||||
f func(ctx *cli.Context) ([]string, error)
|
||||
abortIfFailed bool
|
||||
skipDatabaseInit bool
|
||||
}
|
||||
|
||||
// checklist represents list for all checks
|
||||
var checklist = []check{
|
||||
{
|
||||
// NOTE: this check should be the first in the list
|
||||
title: "Check paths and basic configuration",
|
||||
name: "paths",
|
||||
isDefault: true,
|
||||
f: runDoctorPathInfo,
|
||||
abortIfFailed: true,
|
||||
skipDatabaseInit: true,
|
||||
},
|
||||
{
|
||||
title: "Check Database Version",
|
||||
name: "check-db-version",
|
||||
isDefault: true,
|
||||
f: runDoctorCheckDBVersion,
|
||||
abortIfFailed: true,
|
||||
},
|
||||
{
|
||||
title: "Check if OpenSSH authorized_keys file is up-to-date",
|
||||
name: "authorized_keys",
|
||||
isDefault: true,
|
||||
f: runDoctorAuthorizedKeys,
|
||||
},
|
||||
{
|
||||
title: "Check if SCRIPT_TYPE is available",
|
||||
name: "script-type",
|
||||
isDefault: false,
|
||||
f: runDoctorScriptType,
|
||||
},
|
||||
{
|
||||
title: "Check if hook files are up-to-date and executable",
|
||||
name: "hooks",
|
||||
isDefault: false,
|
||||
f: runDoctorHooks,
|
||||
},
|
||||
{
|
||||
title: "Recalculate merge bases",
|
||||
name: "recalculate_merge_bases",
|
||||
isDefault: false,
|
||||
f: runDoctorPRMergeBase,
|
||||
},
|
||||
{
|
||||
title: "Check consistency of database",
|
||||
name: "check-db-consistency",
|
||||
isDefault: true,
|
||||
f: runDoctorCheckDBConsistency,
|
||||
},
|
||||
// more checks please append here
|
||||
}
|
||||
|
||||
func runDoctor(ctx *cli.Context) error {
|
||||
|
||||
// Silence the default loggers
|
||||
log.DelNamedLogger("console")
|
||||
log.DelNamedLogger(log.DEFAULT)
|
||||
|
||||
// Now setup our own
|
||||
logFile := ctx.String("log-file")
|
||||
if !ctx.IsSet("log-file") {
|
||||
logFile = "doctor.log"
|
||||
}
|
||||
|
||||
if len(logFile) == 0 {
|
||||
log.NewLogger(1000, "doctor", "console", `{"level":"NONE","stacktracelevel":"NONE","colorize":"%t"}`)
|
||||
} else if logFile == "-" {
|
||||
log.NewLogger(1000, "doctor", "console", `{"level":"trace","stacktracelevel":"NONE"}`)
|
||||
} else {
|
||||
log.NewLogger(1000, "doctor", "file", fmt.Sprintf(`{"filename":%q,"level":"trace","stacktracelevel":"NONE"}`, logFile))
|
||||
}
|
||||
|
||||
// Finally redirect the default golog to here
|
||||
golog.SetFlags(0)
|
||||
golog.SetPrefix("")
|
||||
golog.SetOutput(log.NewLoggerAsWriter("INFO", log.GetLogger(log.DEFAULT)))
|
||||
|
||||
if ctx.IsSet("list") {
|
||||
w := tabwriter.NewWriter(os.Stdout, 0, 8, 0, '\t', 0)
|
||||
_, _ = w.Write([]byte("Default\tName\tTitle\n"))
|
||||
for _, check := range checklist {
|
||||
if check.isDefault {
|
||||
_, _ = w.Write([]byte{'*'})
|
||||
}
|
||||
_, _ = w.Write([]byte{'\t'})
|
||||
_, _ = w.Write([]byte(check.name))
|
||||
_, _ = w.Write([]byte{'\t'})
|
||||
_, _ = w.Write([]byte(check.title))
|
||||
_, _ = w.Write([]byte{'\n'})
|
||||
}
|
||||
return w.Flush()
|
||||
}
|
||||
|
||||
var checks []check
|
||||
if ctx.Bool("all") {
|
||||
checks = checklist
|
||||
} else if ctx.IsSet("run") {
|
||||
addDefault := ctx.Bool("default")
|
||||
names := ctx.StringSlice("run")
|
||||
for i, name := range names {
|
||||
names[i] = strings.ToLower(strings.TrimSpace(name))
|
||||
}
|
||||
|
||||
for _, check := range checklist {
|
||||
if addDefault && check.isDefault {
|
||||
checks = append(checks, check)
|
||||
continue
|
||||
}
|
||||
for _, name := range names {
|
||||
if name == check.name {
|
||||
checks = append(checks, check)
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
for _, check := range checklist {
|
||||
if check.isDefault {
|
||||
checks = append(checks, check)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
dbIsInit := false
|
||||
|
||||
for i, check := range checks {
|
||||
if !dbIsInit && !check.skipDatabaseInit {
|
||||
// Only open database after the most basic configuration check
|
||||
setting.EnableXORMLog = false
|
||||
if err := initDBDisableConsole(true); err != nil {
|
||||
fmt.Println(err)
|
||||
fmt.Println("Check if you are using the right config file. You can use a --config directive to specify one.")
|
||||
return nil
|
||||
}
|
||||
dbIsInit = true
|
||||
}
|
||||
fmt.Println("[", i+1, "]", check.title)
|
||||
messages, err := check.f(ctx)
|
||||
for _, message := range messages {
|
||||
fmt.Println("-", message)
|
||||
}
|
||||
if err != nil {
|
||||
fmt.Println("Error:", err)
|
||||
if check.abortIfFailed {
|
||||
return nil
|
||||
}
|
||||
} else {
|
||||
fmt.Println("OK.")
|
||||
}
|
||||
fmt.Println()
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func runDoctorPathInfo(ctx *cli.Context) ([]string, error) {
|
||||
|
||||
res := make([]string, 0, 10)
|
||||
|
||||
if fi, err := os.Stat(setting.CustomConf); err != nil || !fi.Mode().IsRegular() {
|
||||
res = append(res, fmt.Sprintf("Failed to find configuration file at '%s'.", setting.CustomConf))
|
||||
res = append(res, fmt.Sprintf("If you've never ran Gitea yet, this is normal and '%s' will be created for you on first run.", setting.CustomConf))
|
||||
res = append(res, "Otherwise check that you are running this command from the correct path and/or provide a `--config` parameter.")
|
||||
return res, fmt.Errorf("can't proceed without a configuration file")
|
||||
}
|
||||
|
||||
setting.NewContext()
|
||||
|
||||
fail := false
|
||||
|
||||
check := func(name, path string, is_dir, required, is_write bool) {
|
||||
res = append(res, fmt.Sprintf("%-25s '%s'", name+":", path))
|
||||
fi, err := os.Stat(path)
|
||||
if err != nil {
|
||||
if os.IsNotExist(err) && ctx.Bool("fix") && is_dir {
|
||||
if err := os.MkdirAll(path, 0777); err != nil {
|
||||
res = append(res, fmt.Sprintf(" ERROR: %v", err))
|
||||
fail = true
|
||||
return
|
||||
}
|
||||
fi, err = os.Stat(path)
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
if required {
|
||||
res = append(res, fmt.Sprintf(" ERROR: %v", err))
|
||||
fail = true
|
||||
return
|
||||
}
|
||||
res = append(res, fmt.Sprintf(" NOTICE: not accessible (%v)", err))
|
||||
return
|
||||
}
|
||||
|
||||
if is_dir && !fi.IsDir() {
|
||||
res = append(res, " ERROR: not a directory")
|
||||
fail = true
|
||||
return
|
||||
} else if !is_dir && !fi.Mode().IsRegular() {
|
||||
res = append(res, " ERROR: not a regular file")
|
||||
fail = true
|
||||
} else if is_write {
|
||||
if err := runDoctorWritableDir(path); err != nil {
|
||||
res = append(res, fmt.Sprintf(" ERROR: not writable: %v", err))
|
||||
fail = true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Note: print paths inside quotes to make any leading/trailing spaces evident
|
||||
check("Configuration File Path", setting.CustomConf, false, true, false)
|
||||
check("Repository Root Path", setting.RepoRootPath, true, true, true)
|
||||
check("Data Root Path", setting.AppDataPath, true, true, true)
|
||||
check("Custom File Root Path", setting.CustomPath, true, false, false)
|
||||
check("Work directory", setting.AppWorkPath, true, true, false)
|
||||
check("Log Root Path", setting.LogRootPath, true, true, true)
|
||||
|
||||
if options.IsDynamic() {
|
||||
// Do not check/report on StaticRootPath if data is embedded in Gitea (-tags bindata)
|
||||
check("Static File Root Path", setting.StaticRootPath, true, true, false)
|
||||
}
|
||||
|
||||
if fail {
|
||||
return res, fmt.Errorf("please check your configuration file and try again")
|
||||
}
|
||||
|
||||
return res, nil
|
||||
}
|
||||
|
||||
func runDoctorWritableDir(path string) error {
|
||||
// There's no platform-independent way of checking if a directory is writable
|
||||
// https://stackoverflow.com/questions/20026320/how-to-tell-if-folder-exists-and-is-writable
|
||||
|
||||
tmpFile, err := ioutil.TempFile(path, "doctors-order")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if err := os.Remove(tmpFile.Name()); err != nil {
|
||||
fmt.Printf("Warning: can't remove temporary file: '%s'\n", tmpFile.Name())
|
||||
}
|
||||
tmpFile.Close()
|
||||
return nil
|
||||
}
|
||||
|
||||
const tplCommentPrefix = `# gitea public key`
|
||||
|
||||
func runDoctorAuthorizedKeys(ctx *cli.Context) ([]string, error) {
|
||||
if setting.SSH.StartBuiltinServer || !setting.SSH.CreateAuthorizedKeysFile {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
fPath := filepath.Join(setting.SSH.RootPath, "authorized_keys")
|
||||
f, err := os.Open(fPath)
|
||||
if err != nil {
|
||||
if ctx.Bool("fix") {
|
||||
return []string{fmt.Sprintf("Error whilst opening authorized_keys: %v. Attempting regeneration", err)}, models.RewriteAllPublicKeys()
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
linesInAuthorizedKeys := map[string]bool{}
|
||||
|
||||
scanner := bufio.NewScanner(f)
|
||||
for scanner.Scan() {
|
||||
line := scanner.Text()
|
||||
if strings.HasPrefix(line, tplCommentPrefix) {
|
||||
continue
|
||||
}
|
||||
linesInAuthorizedKeys[line] = true
|
||||
}
|
||||
f.Close()
|
||||
|
||||
// now we regenerate and check if there are any lines missing
|
||||
regenerated := &bytes.Buffer{}
|
||||
if err := models.RegeneratePublicKeys(regenerated); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
scanner = bufio.NewScanner(regenerated)
|
||||
for scanner.Scan() {
|
||||
line := scanner.Text()
|
||||
if strings.HasPrefix(line, tplCommentPrefix) {
|
||||
continue
|
||||
}
|
||||
if ok := linesInAuthorizedKeys[line]; ok {
|
||||
continue
|
||||
}
|
||||
if ctx.Bool("fix") {
|
||||
return []string{"authorized_keys is out of date, attempting regeneration"}, models.RewriteAllPublicKeys()
|
||||
}
|
||||
return nil, fmt.Errorf("authorized_keys is out of date and should be regenerated with gitea admin regenerate keys")
|
||||
}
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func runDoctorCheckDBVersion(ctx *cli.Context) ([]string, error) {
|
||||
if err := models.NewEngine(context.Background(), migrations.EnsureUpToDate); err != nil {
|
||||
if ctx.Bool("fix") {
|
||||
return []string{fmt.Sprintf("WARN: Got Error %v during ensure up to date", err), "Attempting to migrate to the latest DB version to fix this."}, models.NewEngine(context.Background(), migrations.Migrate)
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
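// iterateRepositories runs each over every repository in the database and
// collects the messages it returns.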
func iterateRepositories(each func(*models.Repository) ([]string, error)) ([]string, error) {
|
||||
results := []string{}
|
||||
err := models.Iterate(
|
||||
models.DefaultDBContext(),
|
||||
new(models.Repository),
|
||||
builder.Gt{"id": 0},
|
||||
func(idx int, bean interface{}) error {
|
||||
res, err := each(bean.(*models.Repository))
|
||||
results = append(results, res...)
|
||||
return err
|
||||
},
|
||||
)
|
||||
return results, err
|
||||
}
|
||||
|
||||
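// iteratePRs runs each over every pull request whose base repository is repo
// and collects the messages it returns.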
func iteratePRs(repo *models.Repository, each func(*models.Repository, *models.PullRequest) ([]string, error)) ([]string, error) {
|
||||
results := []string{}
|
||||
err := models.Iterate(
|
||||
models.DefaultDBContext(),
|
||||
new(models.PullRequest),
|
||||
builder.Eq{"base_repo_id": repo.ID},
|
||||
func(idx int, bean interface{}) error {
|
||||
res, err := each(repo, bean.(*models.PullRequest))
|
||||
results = append(results, res...)
|
||||
return err
|
||||
},
|
||||
)
|
||||
return results, err
|
||||
}
|
||||
|
||||
func runDoctorHooks(ctx *cli.Context) ([]string, error) {
|
||||
// Need to iterate across all of the repositories
|
||||
return iterateRepositories(func(repo *models.Repository) ([]string, error) {
|
||||
results, err := models.CheckDelegateHooks(repo.RepoPath())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if len(results) > 0 && ctx.Bool("fix") {
|
||||
return []string{fmt.Sprintf("regenerated hooks for %s", repo.FullName())}, models.CreateDelegateHooks(repo.RepoPath())
|
||||
}
|
||||
|
||||
return results, nil
|
||||
})
|
||||
}
|
||||
|
||||
func runDoctorPRMergeBase(ctx *cli.Context) ([]string, error) {
|
||||
numRepos := 0
|
||||
numPRs := 0
|
||||
numPRsUpdated := 0
|
||||
results, err := iterateRepositories(func(repo *models.Repository) ([]string, error) {
|
||||
numRepos++
|
||||
return iteratePRs(repo, func(repo *models.Repository, pr *models.PullRequest) ([]string, error) {
|
||||
numPRs++
|
||||
results := []string{}
|
||||
pr.BaseRepo = repo
|
||||
repoPath := repo.RepoPath()
|
||||
|
||||
oldMergeBase := pr.MergeBase
|
||||
|
||||
if !pr.HasMerged {
|
||||
var err error
|
||||
pr.MergeBase, err = git.NewCommand("merge-base", "--", pr.BaseBranch, pr.GetGitRefName()).RunInDir(repoPath)
|
||||
if err != nil {
|
||||
var err2 error
|
||||
pr.MergeBase, err2 = git.NewCommand("rev-parse", git.BranchPrefix+pr.BaseBranch).RunInDir(repoPath)
|
||||
if err2 != nil {
|
||||
results = append(results, fmt.Sprintf("WARN: Unable to get merge base for PR ID %d, #%d onto %s in %s/%s", pr.ID, pr.Index, pr.BaseBranch, pr.BaseRepo.OwnerName, pr.BaseRepo.Name))
|
||||
log.Error("Unable to get merge base for PR ID %d, Index %d in %s/%s. Error: %v & %v", pr.ID, pr.Index, pr.BaseRepo.OwnerName, pr.BaseRepo.Name, err, err2)
|
||||
return results, nil
|
||||
}
|
||||
}
|
||||
} else {
|
||||
parentsString, err := git.NewCommand("rev-list", "--parents", "-n", "1", pr.MergedCommitID).RunInDir(repoPath)
|
||||
if err != nil {
|
||||
results = append(results, fmt.Sprintf("WARN: Unable to get parents for merged PR ID %d, #%d onto %s in %s/%s", pr.ID, pr.Index, pr.BaseBranch, pr.BaseRepo.OwnerName, pr.BaseRepo.Name))
|
||||
log.Error("Unable to get parents for merged PR ID %d, Index %d in %s/%s. Error: %v", pr.ID, pr.Index, pr.BaseRepo.OwnerName, pr.BaseRepo.Name, err)
|
||||
return results, nil
|
||||
}
|
||||
parents := strings.Split(strings.TrimSpace(parentsString), " ")
|
||||
if len(parents) < 2 {
|
||||
return results, nil
|
||||
}
|
||||
|
||||
args := append([]string{"merge-base", "--"}, parents[1:]...)
|
||||
args = append(args, pr.GetGitRefName())
|
||||
|
||||
pr.MergeBase, err = git.NewCommand(args...).RunInDir(repoPath)
|
||||
if err != nil {
|
||||
results = append(results, fmt.Sprintf("WARN: Unable to get merge base for merged PR ID %d, #%d onto %s in %s/%s", pr.ID, pr.Index, pr.BaseBranch, pr.BaseRepo.OwnerName, pr.BaseRepo.Name))
|
||||
log.Error("Unable to get merge base for merged PR ID %d, Index %d in %s/%s. Error: %v", pr.ID, pr.Index, pr.BaseRepo.OwnerName, pr.BaseRepo.Name, err)
|
||||
return results, nil
|
||||
}
|
||||
}
|
||||
pr.MergeBase = strings.TrimSpace(pr.MergeBase)
|
||||
if pr.MergeBase != oldMergeBase {
|
||||
if ctx.Bool("fix") {
|
||||
if err := pr.UpdateCols("merge_base"); err != nil {
|
||||
return results, err
|
||||
}
|
||||
} else {
|
||||
results = append(results, fmt.Sprintf("#%d onto %s in %s/%s: MergeBase should be %s but is %s", pr.Index, pr.BaseBranch, pr.BaseRepo.OwnerName, pr.BaseRepo.Name, oldMergeBase, pr.MergeBase))
|
||||
}
|
||||
numPRsUpdated++
|
||||
}
|
||||
return results, nil
|
||||
})
|
||||
})
|
||||
|
||||
if ctx.Bool("fix") {
|
||||
results = append(results, fmt.Sprintf("%d PR mergebases updated of %d PRs total in %d repos", numPRsUpdated, numPRs, numRepos))
|
||||
} else {
|
||||
if numPRsUpdated > 0 && err == nil {
|
||||
return results, fmt.Errorf("%d PRs with incorrect mergebases of %d PRs total in %d repos", numPRsUpdated, numPRs, numRepos)
|
||||
}
|
||||
results = append(results, fmt.Sprintf("%d PRs with incorrect mergebases of %d PRs total in %d repos", numPRsUpdated, numPRs, numRepos))
|
||||
}
|
||||
|
||||
return results, err
|
||||
}
|
||||
|
||||
func runDoctorScriptType(ctx *cli.Context) ([]string, error) {
|
||||
path, err := exec.LookPath(setting.ScriptType)
|
||||
if err != nil {
|
||||
return []string{fmt.Sprintf("ScriptType %s is not on the current PATH", setting.ScriptType)}, err
|
||||
}
|
||||
return []string{fmt.Sprintf("ScriptType %s is on the current PATH at %s", setting.ScriptType, path)}, nil
|
||||
}
|
||||
|
||||
func runDoctorCheckDBConsistency(ctx *cli.Context) ([]string, error) {
|
||||
var results []string
|
||||
|
||||
// make sure the DB version is up-to-date
|
||||
if err := models.NewEngine(context.Background(), migrations.EnsureUpToDate); err != nil {
|
||||
return nil, fmt.Errorf("model version on the database does not match the current Gitea version. Model consistency will not be checked until the database is upgraded")
|
||||
}
|
||||
|
||||
// find tracked times without existing issues/pulls
|
||||
count, err := models.CountOrphanedObjects("tracked_time", "issue", "tracked_time.issue_id=issue.id")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if count > 0 {
|
||||
if ctx.Bool("fix") {
|
||||
if err = models.DeleteOrphanedObjects("tracked_time", "issue", "tracked_time.issue_id=issue.id"); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
results = append(results, fmt.Sprintf("%d tracked times without existing issue deleted", count))
|
||||
} else {
|
||||
results = append(results, fmt.Sprintf("%d tracked times without existing issue", count))
|
||||
}
|
||||
}
|
||||
|
||||
return results, nil
|
||||
}
|
|
@ -151,8 +151,10 @@ func runDump(ctx *cli.Context) error {
|
|||
}
|
||||
}
|
||||
|
||||
if err := z.AddDir("log", setting.LogRootPath); err != nil {
|
||||
fatal("Failed to include log: %v", err)
|
||||
if com.IsExist(setting.LogRootPath) {
|
||||
if err := z.AddDir("log", setting.LogRootPath); err != nil {
|
||||
fatal("Failed to include log: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
if err = z.Close(); err != nil {
|
||||
|
|
142 cmd/hook.go
|
@ -8,15 +8,18 @@ import (
|
|||
"bufio"
|
||||
"bytes"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"os"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"code.gitea.io/gitea/models"
|
||||
"code.gitea.io/gitea/modules/git"
|
||||
"code.gitea.io/gitea/modules/private"
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
"code.gitea.io/gitea/modules/util"
|
||||
|
||||
"github.com/urfave/cli"
|
||||
)
|
||||
|
@ -58,6 +61,78 @@ var (
|
|||
}
|
||||
)
|
||||
|
||||
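// delayWriter wraps an io.Writer and buffers everything written to it until
// the configured delay has elapsed; the first write after the timer fires
// flushes the buffer, and later writes pass straight through. Close discards
// the buffered output if the delay never elapsed.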
type delayWriter struct {
|
||||
internal io.Writer
|
||||
buf *bytes.Buffer
|
||||
timer *time.Timer
|
||||
}
|
||||
|
||||
func newDelayWriter(internal io.Writer, delay time.Duration) *delayWriter {
|
||||
timer := time.NewTimer(delay)
|
||||
return &delayWriter{
|
||||
internal: internal,
|
||||
buf: &bytes.Buffer{},
|
||||
timer: timer,
|
||||
}
|
||||
}
|
||||
|
||||
func (d *delayWriter) Write(p []byte) (n int, err error) {
|
||||
if d.buf != nil {
|
||||
select {
|
||||
case <-d.timer.C:
|
||||
_, err := d.internal.Write(d.buf.Bytes())
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
d.buf = nil
|
||||
return d.internal.Write(p)
|
||||
default:
|
||||
return d.buf.Write(p)
|
||||
}
|
||||
}
|
||||
return d.internal.Write(p)
|
||||
}
|
||||
|
||||
func (d *delayWriter) WriteString(s string) (n int, err error) {
|
||||
if d.buf != nil {
|
||||
select {
|
||||
case <-d.timer.C:
|
||||
_, err := d.internal.Write(d.buf.Bytes())
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
d.buf = nil
|
||||
return d.internal.Write([]byte(s))
|
||||
default:
|
||||
return d.buf.WriteString(s)
|
||||
}
|
||||
}
|
||||
return d.internal.Write([]byte(s))
|
||||
}
|
||||
|
||||
func (d *delayWriter) Close() error {
|
||||
if d == nil {
|
||||
return nil
|
||||
}
|
||||
stopped := util.StopTimer(d.timer)
|
||||
if stopped || d.buf == nil {
|
||||
return nil
|
||||
}
|
||||
_, err := d.internal.Write(d.buf.Bytes())
|
||||
d.buf = nil
|
||||
return err
|
||||
}
|
||||
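A small usage sketch of the writer above, assuming it sits in the same package as the hook command (the fixed five-second delay and the `reportProgress` helper are illustrative only; the hook code below wires the delay to `setting.Git.VerbosePushDelay`):

```go
// Sketch only: status output stays buffered and is flushed to stdout only if
// the work is still running once the delay has elapsed.
func reportProgress(steps []string) {
	out := newDelayWriter(os.Stdout, 5*time.Second)
	// Discards the buffer if the delay never elapsed, flushes it otherwise.
	defer out.Close()

	for _, step := range steps {
		fmt.Fprintf(out, "%s\n", step) // buffered until the timer fires
		// ... do the actual work for this step ...
	}
}
```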
|
||||
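// nilWriter discards everything written to it; it is used when verbose push
// output is disabled entirely.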
type nilWriter struct{}
|
||||
|
||||
func (n *nilWriter) Write(p []byte) (int, error) {
|
||||
return len(p), nil
|
||||
}
|
||||
|
||||
func (n *nilWriter) WriteString(s string) (int, error) {
|
||||
return len(s), nil
|
||||
}
|
||||
|
||||
func runHookPreReceive(c *cli.Context) error {
|
||||
if os.Getenv(models.EnvIsInternal) == "true" {
|
||||
return nil
|
||||
|
@ -101,6 +176,18 @@ Gitea or set your environment appropriately.`, "")
|
|||
total := 0
|
||||
lastline := 0
|
||||
|
||||
var out io.Writer
|
||||
out = &nilWriter{}
|
||||
if setting.Git.VerbosePush {
|
||||
if setting.Git.VerbosePushDelay > 0 {
|
||||
dWriter := newDelayWriter(os.Stdout, setting.Git.VerbosePushDelay)
|
||||
defer dWriter.Close()
|
||||
out = dWriter
|
||||
} else {
|
||||
out = os.Stdout
|
||||
}
|
||||
}
|
||||
|
||||
for scanner.Scan() {
|
||||
// TODO: support news feeds for wiki
|
||||
if isWiki {
|
||||
|
@ -124,12 +211,10 @@ Gitea or set your environment appropriately.`, "")
|
|||
newCommitIDs[count] = newCommitID
|
||||
refFullNames[count] = refFullName
|
||||
count++
|
||||
fmt.Fprintf(os.Stdout, "*")
|
||||
os.Stdout.Sync()
|
||||
fmt.Fprintf(out, "*")
|
||||
|
||||
if count >= hookBatchSize {
|
||||
fmt.Fprintf(os.Stdout, " Checking %d branches\n", count)
|
||||
os.Stdout.Sync()
|
||||
fmt.Fprintf(out, " Checking %d branches\n", count)
|
||||
|
||||
hookOptions.OldCommitIDs = oldCommitIDs
|
||||
hookOptions.NewCommitIDs = newCommitIDs
|
||||
|
@ -147,12 +232,10 @@ Gitea or set your environment appropriately.`, "")
|
|||
lastline = 0
|
||||
}
|
||||
} else {
|
||||
fmt.Fprintf(os.Stdout, ".")
|
||||
os.Stdout.Sync()
|
||||
fmt.Fprintf(out, ".")
|
||||
}
|
||||
if lastline >= hookBatchSize {
|
||||
fmt.Fprintf(os.Stdout, "\n")
|
||||
os.Stdout.Sync()
|
||||
fmt.Fprintf(out, "\n")
|
||||
lastline = 0
|
||||
}
|
||||
}
|
||||
|
@ -162,8 +245,7 @@ Gitea or set your environment appropriately.`, "")
|
|||
hookOptions.NewCommitIDs = newCommitIDs[:count]
|
||||
hookOptions.RefFullNames = refFullNames[:count]
|
||||
|
||||
fmt.Fprintf(os.Stdout, " Checking %d branches\n", count)
|
||||
os.Stdout.Sync()
|
||||
fmt.Fprintf(out, " Checking %d branches\n", count)
|
||||
|
||||
statusCode, msg := private.HookPreReceive(username, reponame, hookOptions)
|
||||
switch statusCode {
|
||||
|
@ -173,14 +255,11 @@ Gitea or set your environment appropriately.`, "")
|
|||
fail(msg, "")
|
||||
}
|
||||
} else if lastline > 0 {
|
||||
fmt.Fprintf(os.Stdout, "\n")
|
||||
os.Stdout.Sync()
|
||||
fmt.Fprintf(out, "\n")
|
||||
lastline = 0
|
||||
}
|
||||
|
||||
fmt.Fprintf(os.Stdout, "Checked %d references in total\n", total)
|
||||
os.Stdout.Sync()
|
||||
|
||||
fmt.Fprintf(out, "Checked %d references in total\n", total)
|
||||
return nil
|
||||
}
|
||||
|
||||
|
@ -206,6 +285,19 @@ Gitea or set your environment appropriately.`, "")
|
|||
}
|
||||
}
|
||||
|
||||
var out io.Writer
|
||||
var dWriter *delayWriter
|
||||
out = &nilWriter{}
|
||||
if setting.Git.VerbosePush {
|
||||
if setting.Git.VerbosePushDelay > 0 {
|
||||
dWriter = newDelayWriter(os.Stdout, setting.Git.VerbosePushDelay)
|
||||
defer dWriter.Close()
|
||||
out = dWriter
|
||||
} else {
|
||||
out = os.Stdout
|
||||
}
|
||||
}
|
||||
|
||||
// the environment set by the serv command
|
||||
repoUser := os.Getenv(models.EnvRepoUsername)
|
||||
isWiki := (os.Getenv(models.EnvRepoIsWiki) == "true")
|
||||
|
@ -241,7 +333,7 @@ Gitea or set your environment appropriately.`, "")
|
|||
continue
|
||||
}
|
||||
|
||||
fmt.Fprintf(os.Stdout, ".")
|
||||
fmt.Fprintf(out, ".")
|
||||
oldCommitIDs[count] = string(fields[0])
|
||||
newCommitIDs[count] = string(fields[1])
|
||||
refFullNames[count] = string(fields[2])
|
||||
|
@ -250,16 +342,15 @@ Gitea or set your environment appropriately.`, "")
|
|||
}
|
||||
count++
|
||||
total++
|
||||
os.Stdout.Sync()
|
||||
|
||||
if count >= hookBatchSize {
|
||||
fmt.Fprintf(os.Stdout, " Processing %d references\n", count)
|
||||
os.Stdout.Sync()
|
||||
fmt.Fprintf(out, " Processing %d references\n", count)
|
||||
hookOptions.OldCommitIDs = oldCommitIDs
|
||||
hookOptions.NewCommitIDs = newCommitIDs
|
||||
hookOptions.RefFullNames = refFullNames
|
||||
resp, err := private.HookPostReceive(repoUser, repoName, hookOptions)
|
||||
if resp == nil {
|
||||
_ = dWriter.Close()
|
||||
hookPrintResults(results)
|
||||
fail("Internal Server Error", err)
|
||||
}
|
||||
|
@ -277,9 +368,9 @@ Gitea or set your environment appropriately.`, "")
|
|||
fail("Internal Server Error", "SetDefaultBranch failed with Error: %v", err)
|
||||
}
|
||||
}
|
||||
fmt.Fprintf(os.Stdout, "Processed %d references in total\n", total)
|
||||
os.Stdout.Sync()
|
||||
fmt.Fprintf(out, "Processed %d references in total\n", total)
|
||||
|
||||
_ = dWriter.Close()
|
||||
hookPrintResults(results)
|
||||
return nil
|
||||
}
|
||||
|
@ -288,19 +379,18 @@ Gitea or set your environment appropriately.`, "")
|
|||
hookOptions.NewCommitIDs = newCommitIDs[:count]
|
||||
hookOptions.RefFullNames = refFullNames[:count]
|
||||
|
||||
fmt.Fprintf(os.Stdout, " Processing %d references\n", count)
|
||||
os.Stdout.Sync()
|
||||
fmt.Fprintf(out, " Processing %d references\n", count)
|
||||
|
||||
resp, err := private.HookPostReceive(repoUser, repoName, hookOptions)
|
||||
if resp == nil {
|
||||
_ = dWriter.Close()
|
||||
hookPrintResults(results)
|
||||
fail("Internal Server Error", err)
|
||||
}
|
||||
wasEmpty = wasEmpty || resp.RepoWasEmpty
|
||||
results = append(results, resp.Results...)
|
||||
|
||||
fmt.Fprintf(os.Stdout, "Processed %d references in total\n", total)
|
||||
os.Stdout.Sync()
|
||||
fmt.Fprintf(out, "Processed %d references in total\n", total)
|
||||
|
||||
if wasEmpty && masterPushed {
|
||||
// We need to tell the repo to reset the default branch to master
|
||||
|
@ -309,7 +399,7 @@ Gitea or set your environment appropriately.`, "")
|
|||
fail("Internal Server Error", "SetDefaultBranch failed with Error: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
_ = dWriter.Close()
|
||||
hookPrintResults(results)
|
||||
|
||||
return nil
|
||||
|
|
|
@ -12,6 +12,7 @@ import (
|
|||
"net/url"
|
||||
"os"
|
||||
"os/exec"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
@ -72,6 +73,7 @@ var (
|
|||
"git-receive-pack": models.AccessModeWrite,
|
||||
lfsAuthenticateVerb: models.AccessModeNone,
|
||||
}
|
||||
alphaDashDotPattern = regexp.MustCompile(`[^\w-\.]`)
|
||||
)
|
||||
|
||||
func fail(userMessage, logMessage string, args ...interface{}) {
|
||||
|
@ -147,6 +149,10 @@ func runServ(c *cli.Context) error {
|
|||
username := strings.ToLower(rr[0])
|
||||
reponame := strings.ToLower(strings.TrimSuffix(rr[1], ".git"))
|
||||
|
||||
if alphaDashDotPattern.MatchString(reponame) {
|
||||
fail("Invalid repo name", "Invalid repo name: %s", reponame)
|
||||
}
|
||||
|
||||
if setting.EnablePprof || c.Bool("enable-pprof") {
|
||||
if err := os.MkdirAll(setting.PprofDataPath, os.ModePerm); err != nil {
|
||||
fail("Error while trying to create PPROF_DATA_PATH", "Error while trying to create PPROF_DATA_PATH: %v", err)
|
||||
|
|
|
@ -28,11 +28,11 @@ import (
|
|||
"code.gitea.io/gitea/routers"
|
||||
"code.gitea.io/gitea/routers/routes"
|
||||
|
||||
"github.com/go-git/go-git/v5"
|
||||
"github.com/go-git/go-git/v5/config"
|
||||
"github.com/go-git/go-git/v5/plumbing"
|
||||
context2 "github.com/gorilla/context"
|
||||
"github.com/unknwon/com"
|
||||
"gopkg.in/src-d/go-git.v4"
|
||||
"gopkg.in/src-d/go-git.v4/config"
|
||||
"gopkg.in/src-d/go-git.v4/plumbing"
|
||||
"gopkg.in/testfixtures.v2"
|
||||
"xorm.io/xorm"
|
||||
)
|
||||
|
|
|
@ -275,8 +275,9 @@ DISABLE_ROUTER_LOG = false
|
|||
; not forget to export the private key):
|
||||
; $ openssl pkcs12 -in cert.pfx -out cert.pem -nokeys
|
||||
; $ openssl pkcs12 -in cert.pfx -out key.pem -nocerts -nodes
|
||||
CERT_FILE = custom/https/cert.pem
|
||||
KEY_FILE = custom/https/key.pem
|
||||
; Paths are relative to CUSTOM_PATH
|
||||
CERT_FILE = https/cert.pem
|
||||
KEY_FILE = https/key.pem
|
||||
; Root directory containing templates and static files.
|
||||
; default is the path where Gitea is executed
|
||||
STATIC_ROOT_PATH =
|
||||
|
|
|
@ -18,7 +18,7 @@ params:
|
|||
description: Git with a cup of tea
|
||||
author: The Gitea Authors
|
||||
website: https://docs.gitea.io
|
||||
version: 1.10.2
|
||||
version: 1.11.5
|
||||
|
||||
outputs:
|
||||
home:
|
||||
|
|
|
@ -181,8 +181,8 @@ Values containing `#` or `;` must be quoted using `` ` `` or `"""`.
|
|||
- `SSH_LISTEN_PORT`: **%(SSH\_PORT)s**: Port for the built-in SSH server.
|
||||
- `OFFLINE_MODE`: **false**: Disables use of CDN for static files and Gravatar for profile pictures.
|
||||
- `DISABLE_ROUTER_LOG`: **false**: Mute printing of the router log.
|
||||
- `CERT_FILE`: **custom/https/cert.pem**: Cert file path used for HTTPS.
|
||||
- `KEY_FILE`: **custom/https/key.pem**: Key file path used for HTTPS.
|
||||
- `CERT_FILE`: **https/cert.pem**: Cert file path used for HTTPS. From 1.11 paths are relative to `CUSTOM_PATH`.
|
||||
- `KEY_FILE`: **https/key.pem**: Key file path used for HTTPS. From 1.11 paths are relative to `CUSTOM_PATH`.
|
||||
- `STATIC_ROOT_PATH`: **./**: Upper level of template and static files path.
|
||||
- `STATIC_CACHE_TIME`: **6h**: Web browser cache time for static resources on `custom/`, `public/` and all uploaded avatars.
|
||||
- `ENABLE_GZIP`: **false**: Enables application-level GZIP support.
|
||||
|
@ -522,6 +522,8 @@ NB: You must `REDIRECT_MACARON_LOG` and have `DISABLE_ROUTER_LOG` set to `false`
|
|||
- `MAX_GIT_DIFF_FILES`: **100**: Max number of files shown in diff view.
|
||||
- `GC_ARGS`: **\<empty\>**: Arguments for command `git gc`, e.g. `--aggressive --auto`. See more on http://git-scm.com/docs/git-gc/
|
||||
- `ENABLE_AUTO_GIT_WIRE_PROTOCOL`: **true**: Use Git wire protocol version 2 when the Git version is >= 2.18; set to false to always use wire protocol version 1.
|
||||
- `VERBOSE_PUSH`: **true**: Print status information about pushes as they are being processed.
|
||||
- `VERBOSE_PUSH_DELAY`: **5s**: Only print verbose information if push takes longer than this delay.
|
||||
|
||||
## Git - Timeout settings (`git.timeout`)
|
||||
- `DEFAULT`: **360**: Default timeout, in seconds, for Git operations.
|
||||
|
|
|
@ -60,7 +60,7 @@ _Symbols used in table:_
|
|||
| Git LFS 2.0 | ✓ | ✘ | ✓ | ✓ | ✓ | ⁄ | ✓ |
|
||||
| Group Milestones | ✘ | ✘ | ✘ | ✓ | ✓ | ✘ | ✘ |
|
||||
| Granular user roles (Code, Issues, Wiki etc) | ✓ | ✘ | ✘ | ✓ | ✓ | ✘ | ✘ |
|
||||
| Verified Committer | ✘ | ✘ | ? | ✓ | ✓ | ✓ | ✘ |
|
||||
| Verified Committer | ⁄ | ✘ | ? | ✓ | ✓ | ✓ | ✘ |
|
||||
| GPG Signed Commits | ✓ | ✘ | ✓ | ✓ | ✓ | ✓ | ✓ |
|
||||
| Reject unsigned commits | [✘](https://github.com/go-gitea/gitea/issues/2770) | ✘ | ✓ | ✓ | ✓ | ✘ | ✓ |
|
||||
| Repository Activity page | ✓ | ✘ | ✓ | ✓ | ✓ | ✓ | ✓ |
|
||||
|
|
|
@ -114,6 +114,17 @@ recommended way to build from source is therefore:
|
|||
TAGS="bindata sqlite sqlite_unlock_notify" make build
|
||||
```
|
||||
|
||||
The `build` target is split into two sub-targets:
|
||||
|
||||
- `make backend` which requires [Go 1.11](https://golang.org/dl/) or greater.
|
||||
- `make frontend` which requires [Node.js 10.0.0](https://nodejs.org/en/download/) or greater.
|
||||
|
||||
If pre-built frontend files are present, it is possible to build only the backend:
|
||||
|
||||
```bash
|
||||
TAGS="bindata" make backend
|
||||
```
|
||||
|
||||
## Test
|
||||
|
||||
After following the steps above, a `gitea` binary will be available in the working directory.
|
||||
|
|
|
@ -136,7 +136,8 @@ the `!` marker to identify pull requests. For example:
|
|||
> This is pull request [!1234](#), and links to a pull request in Gitea.
|
||||
|
||||
The `!` and `#` can be used interchangeably for issues and pull request _except_
|
||||
for this case, where a distinction is required.
|
||||
for this case, where a distinction is required. If the repository uses an external
|
||||
tracker, the commit message for a squash merge will use `!` as the reference by default.
|
||||
|
||||
## Issues and Pull Requests References Summary
|
||||
|
||||
|
|
14 go.mod
|
@ -41,6 +41,8 @@ require (
|
|||
github.com/facebookgo/subset v0.0.0-20150612182917-8dac2c3c4870 // indirect
|
||||
github.com/gliderlabs/ssh v0.2.2
|
||||
github.com/glycerine/go-unsnap-stream v0.0.0-20190901134440-81cf024a9e0a // indirect
|
||||
github.com/go-git/go-billy/v5 v5.0.0
|
||||
github.com/go-git/go-git/v5 v5.0.0
|
||||
github.com/go-openapi/jsonreference v0.19.3 // indirect
|
||||
github.com/go-redis/redis v6.15.2+incompatible
|
||||
github.com/go-sql-driver/mysql v1.4.1
|
||||
|
@ -62,7 +64,7 @@ require (
|
|||
github.com/lib/pq v1.2.0
|
||||
github.com/lunny/dingtalk_webhook v0.0.0-20171025031554-e3534c89ef96
|
||||
github.com/mailru/easyjson v0.7.0 // indirect
|
||||
github.com/markbates/goth v1.56.0
|
||||
github.com/markbates/goth v1.61.2
|
||||
github.com/mattn/go-isatty v0.0.7
|
||||
github.com/mattn/go-oci8 v0.0.0-20190320171441-14ba190cf52d // indirect
|
||||
github.com/mattn/go-sqlite3 v1.11.0
|
||||
|
@ -80,7 +82,7 @@ require (
|
|||
github.com/quasoft/websspi v1.0.0
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20190321074620-2f0d2b0e0001 // indirect
|
||||
github.com/satori/go.uuid v1.2.0
|
||||
github.com/sergi/go-diff v1.0.0
|
||||
github.com/sergi/go-diff v1.1.0
|
||||
github.com/shurcooL/httpfs v0.0.0-20190527155220-6a4d4a70508b // indirect
|
||||
github.com/shurcooL/vfsgen v0.0.0-20181202132449-6a9ea43bcacd
|
||||
github.com/steveyen/gtreap v0.0.0-20150807155958-0abe01ef9be2 // indirect
|
||||
|
@ -95,10 +97,10 @@ require (
|
|||
github.com/yohcop/openid-go v0.0.0-20160914080427-2c050d2dae53
|
||||
github.com/yuin/goldmark v1.1.19
|
||||
go.etcd.io/bbolt v1.3.3 // indirect
|
||||
golang.org/x/crypto v0.0.0-20191227163750-53104e6ec876
|
||||
golang.org/x/net v0.0.0-20191101175033-0deb6923b6d9
|
||||
golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073
|
||||
golang.org/x/net v0.0.0-20200301022130-244492dfa37a
|
||||
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45
|
||||
golang.org/x/sys v0.0.0-20191127021746-63cb32ae39b2
|
||||
golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527
|
||||
golang.org/x/text v0.3.2
|
||||
golang.org/x/tools v0.0.0-20191213221258-04c2e8eff935 // indirect
|
||||
gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc // indirect
|
||||
|
@ -106,8 +108,6 @@ require (
|
|||
gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df
|
||||
gopkg.in/ini.v1 v1.51.1
|
||||
gopkg.in/ldap.v3 v3.0.2
|
||||
gopkg.in/src-d/go-billy.v4 v4.3.2
|
||||
gopkg.in/src-d/go-git.v4 v4.13.1
|
||||
gopkg.in/testfixtures.v2 v2.5.0
|
||||
mvdan.cc/xurls/v2 v2.1.0
|
||||
strk.kbt.io/projects/go/libravatar v0.0.0-20191008002943-06d1c002b251
|
||||
|
|
59 go.sum
|
@ -110,7 +110,7 @@ github.com/couchbase/vellum v0.0.0-20190829182332-ef2e028c01fd/go.mod h1:xbc8Ff/
|
|||
github.com/couchbaselabs/go-couchbase v0.0.0-20190708161019-23e7ca2ce2b7 h1:1XjEY/gnjQ+AfXef2U6dxCquhiRzkEpxZuWqs+QxTL8=
|
||||
github.com/couchbaselabs/go-couchbase v0.0.0-20190708161019-23e7ca2ce2b7/go.mod h1:mby/05p8HE5yHEAKiIH/555NoblMs7PtW6NrYshDruc=
|
||||
github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE=
|
||||
github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY=
|
||||
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
|
||||
github.com/cupcake/rdb v0.0.0-20161107195141-43ba34106c76/go.mod h1:vYwsqCOLxGiisLwp9rITslkFNpZD5rz43tf41QFkTWY=
|
||||
github.com/cznic/b v0.0.0-20181122101859-a26611c4d92d h1:SwD98825d6bdB+pEuTxWOXiSjBrHdOl/UVp75eI7JT8=
|
||||
github.com/cznic/b v0.0.0-20181122101859-a26611c4d92d/go.mod h1:URriBxXwVq5ijiJ12C7iIZqlA69nTlI+LgI6/pwftG8=
|
||||
|
@ -164,6 +164,14 @@ github.com/glycerine/go-unsnap-stream v0.0.0-20190901134440-81cf024a9e0a h1:FQqo
|
|||
github.com/glycerine/go-unsnap-stream v0.0.0-20190901134440-81cf024a9e0a/go.mod h1:/20jfyN9Y5QPEAprSgKAUr+glWDY39ZiUEAYOEv5dsE=
|
||||
github.com/glycerine/goconvey v0.0.0-20190410193231-58a59202ab31 h1:gclg6gY70GLy3PbkQ1AERPfmLMMagS60DKF78eWwLn8=
|
||||
github.com/glycerine/goconvey v0.0.0-20190410193231-58a59202ab31/go.mod h1:Ogl1Tioa0aV7gstGFO7KhffUsb9M4ydbEbbxpcEDc24=
|
||||
github.com/go-git/gcfg v1.5.0 h1:Q5ViNfGF8zFgyJWPqYwA7qGFoMTEiBmdlkcfRmpIMa4=
|
||||
github.com/go-git/gcfg v1.5.0/go.mod h1:5m20vg6GwYabIxaOonVkTdrILxQMpEShl1xiMF4ua+E=
|
||||
github.com/go-git/go-billy/v5 v5.0.0 h1:7NQHvd9FVid8VL4qVUMm8XifBK+2xCoZ2lSk0agRrHM=
|
||||
github.com/go-git/go-billy/v5 v5.0.0/go.mod h1:pmpqyWchKfYfrkb/UVH4otLvyi/5gJlGI4Hb3ZqZ3W0=
|
||||
github.com/go-git/go-git-fixtures/v4 v4.0.1 h1:q+IFMfLx200Q3scvt2hN79JsEzy4AmBTp/pqnefH+Bc=
|
||||
github.com/go-git/go-git-fixtures/v4 v4.0.1/go.mod h1:m+ICp2rF3jDhFgEZ/8yziagdT1C+ZpZcrJjappBCDSw=
|
||||
github.com/go-git/go-git/v5 v5.0.0 h1:k5RWPm4iJwYtfWoxIJy4wJX9ON7ihPeZZYC1fLYDnpg=
|
||||
github.com/go-git/go-git/v5 v5.0.0/go.mod h1:oYD8y9kWsGINPFJoLdaScGCN6dlKg23blmClfZwtUVA=
|
||||
github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
|
||||
github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
|
||||
github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
|
||||
|
@ -346,11 +354,13 @@ github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
|
|||
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
|
||||
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
||||
github.com/kr/pty v1.1.5/go.mod h1:9r2w37qlBe7rQ6e1fg1S/9xpWHSnaqNdHD3WcMdbPDA=
|
||||
github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw=
|
||||
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
|
||||
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
|
||||
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
|
||||
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
|
||||
github.com/lafriks/xormstore v1.3.2 h1:hqi3F8s/B4rz8GuEZZDuHuOxRjeuOpEI/cC7vcnWwH4=
|
||||
github.com/lafriks/xormstore v1.3.2/go.mod h1:mVNIwIa25QIr8rfR7YlVjrqN/apswHkVdtLCyVYBzXw=
|
||||
github.com/lestrrat-go/jwx v0.9.0/go.mod h1:iEoxlYfZjvoGpuWwxUz+eR5e6KTJGsaRcy/YNA/UnBk=
|
||||
github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
|
||||
github.com/lib/pq v1.2.0 h1:LXpIM/LZ5xGFhOpXAQUIMM1HdyqzVYM13zNdjCEEcA0=
|
||||
github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
|
||||
|
@ -370,8 +380,8 @@ github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN
|
|||
github.com/mailru/easyjson v0.7.0 h1:aizVhC/NAAcKWb+5QsU1iNOZb4Yws5UO2I+aIprQITM=
|
||||
github.com/mailru/easyjson v0.7.0/go.mod h1:KAzv3t3aY1NaHWoQz1+4F1ccyAH66Jk7yos7ldAVICs=
|
||||
github.com/markbates/going v1.0.0/go.mod h1:I6mnB4BPnEeqo85ynXIx1ZFLLbtiLHNXVgWeFO9OGOA=
|
||||
github.com/markbates/goth v1.56.0 h1:XEYedCgMNz5pi3ojXI8z2XUmXtBnMeuKUpx4Z6HlNj8=
|
||||
github.com/markbates/goth v1.56.0/go.mod h1:zZmAw0Es0Dpm7TT/4AdN14QrkiWLMrrU9Xei1o+/mdA=
|
||||
github.com/markbates/goth v1.61.2 h1:jDowrUH5qw8KGuQdKwFhLzkXkTYCIPfz3LHADJsiPIs=
|
||||
github.com/markbates/goth v1.61.2/go.mod h1:qh2QfwZoWRucQ+DR5KVKC6dUGkNCToWh4vS45GIzFsY=
|
||||
github.com/mattn/go-isatty v0.0.7 h1:UvyT9uN+3r7yLEYSlJsbQGdsaB/a0DlgWP3pql6iwOc=
|
||||
github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
|
||||
github.com/mattn/go-oci8 v0.0.0-20190320171441-14ba190cf52d h1:m+dSK37rFf2fqppZhg15yI2IwC9BtucBiRwSDm9VL8g=
|
||||
|
@ -402,6 +412,8 @@ github.com/msteinert/pam v0.0.0-20151204160544-02ccfbfaf0cc/go.mod h1:np1wUFZ6ty
|
|||
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
|
||||
github.com/nfnt/resize v0.0.0-20160724205520-891127d8d1b5 h1:BvoENQQU+fZ9uukda/RzCAL/191HHwJA5b13R6diVlY=
|
||||
github.com/nfnt/resize v0.0.0-20160724205520-891127d8d1b5/go.mod h1:jpp1/29i3P1S/RLdc7JQKbRpFeM1dOBd8T9ki5s+AY8=
|
||||
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs=
|
||||
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
|
||||
github.com/niklasfasching/go-org v0.1.8 h1:Kjvs6lP+LIILHhc9zIJ4Gu90a/pVY483if2Qmu8v4Fg=
|
||||
github.com/niklasfasching/go-org v0.1.8/go.mod h1:AsLD6X7djzRIz4/RFZu8vwRL0VGjUvGZCCH1Nz0VdrU=
|
||||
github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
|
||||
|
@ -416,7 +428,6 @@ github.com/onsi/gomega v1.5.0 h1:izbySO9zDPmjJ8rDjLvkA2zJHIo+HkYXHnf7eN7SSyo=
|
|||
github.com/onsi/gomega v1.5.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
|
||||
github.com/openzipkin/zipkin-go v0.1.6/go.mod h1:QgAqvLzwWbR/WpD4A3cGpPtJrZXNIiJc5AZX7/PBEpw=
|
||||
github.com/pborman/uuid v1.2.0/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k=
|
||||
github.com/pelletier/go-buffruneio v0.2.0/go.mod h1:JkE26KsDizTr40EUHkXVtNPvgGtbSNq5BcowyYOWdKo=
|
||||
github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic=
|
||||
github.com/pelletier/go-toml v1.4.0 h1:u3Z1r+oOXJIkxqw34zVhyPgjBsm6X2wn21NWs/HfSeg=
|
||||
github.com/pelletier/go-toml v1.4.0/go.mod h1:PN7xzY2wHTK0K9p34ErDQMlFxa51Fk0OUruD3k1mMwo=
|
||||
|
@ -462,12 +473,11 @@ github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqn
|
|||
github.com/remyoudompheng/bigfft v0.0.0-20190321074620-2f0d2b0e0001 h1:YDeskXpkNDhPdWN3REluVa46HQOVuVkjkd2sWnrABNQ=
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20190321074620-2f0d2b0e0001/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
|
||||
github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg=
|
||||
github.com/russross/blackfriday v1.5.2 h1:HyvC0ARfnZBqnXwABFeSZHpKvJHJJfPz81GNueLj0oo=
|
||||
github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g=
|
||||
github.com/satori/go.uuid v1.2.0 h1:0uYX9dsZ2yD7q2RtLRtPSdGDWzjeM3TbMJP9utgA0ww=
|
||||
github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0=
|
||||
github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ=
|
||||
github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
|
||||
github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0=
|
||||
github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
|
||||
github.com/shurcooL/httpfs v0.0.0-20190527155220-6a4d4a70508b h1:4kg1wyftSKxLtnPAvcRWakIPpokB9w780/KwrNLnfPA=
|
||||
github.com/shurcooL/httpfs v0.0.0-20190527155220-6a4d4a70508b/go.mod h1:ZY1cvUeJuFPAdZ/B6v7RHavJWZn2YPVFQ1OSXhCGOkg=
|
||||
github.com/shurcooL/vfsgen v0.0.0-20181202132449-6a9ea43bcacd h1:ug7PpSOB5RBPK1Kg6qskGBoP3Vnj/aNYFTznWvlkGo0=
|
||||
|
@ -502,13 +512,10 @@ github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnIn
|
|||
github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s=
|
||||
github.com/spf13/viper v1.4.0 h1:yXHLWeravcrgGyFSyCgdYpXQ9dR9c/WED3pg1RhxqEU=
|
||||
github.com/spf13/viper v1.4.0/go.mod h1:PTJ7Z/lr49W6bUbkmS1V3by4uWynFiR9p7+dSq/yZzE=
|
||||
github.com/src-d/gcfg v1.4.0 h1:xXbNR5AlLSA315x2UO+fTSSAXCDf+Ar38/6oyGbDKQ4=
|
||||
github.com/src-d/gcfg v1.4.0/go.mod h1:p/UMsR43ujA89BJY9duynAwIpvqEujIH/jFlfL7jWoI=
|
||||
github.com/steveyen/gtreap v0.0.0-20150807155958-0abe01ef9be2 h1:JNEGSiWg6D3lcBCMCBqN3ELniXujt+0QNHLhNnO0w3s=
|
||||
github.com/steveyen/gtreap v0.0.0-20150807155958-0abe01ef9be2/go.mod h1:mjqs7N0Q6m5HpR7QfXVBZXZWSqTjQLeTujjA/xUp2uw=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/objx v0.2.0 h1:Hbg2NidpLE8veEBkEZTL3CvlkUIVzuU9jDplZO54c48=
|
||||
github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE=
|
||||
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||
|
@ -532,7 +539,6 @@ github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGr
|
|||
github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0=
|
||||
github.com/unknwon/cae v0.0.0-20190822084630-55a0b64484a1 h1:SpoCl3+Pta5/ubQyF+Fmx65obtpfkyzeaOIneCE3MTw=
|
||||
github.com/unknwon/cae v0.0.0-20190822084630-55a0b64484a1/go.mod h1:QaSeRctcea9fK6piJpAMCCPKxzJ01+xFcr2k1m3WRPU=
|
||||
github.com/unknwon/com v0.0.0-20190804042917-757f69c95f3e h1:GSGeB9EAKY2spCABz6xOX5DbxZEXolK+nBSvmsQwRjM=
|
||||
github.com/unknwon/com v0.0.0-20190804042917-757f69c95f3e/go.mod h1:tOOxU81rwgoCLoOVVPHb6T/wt8HZygqH5id+GNnlCXM=
|
||||
github.com/unknwon/com v1.0.1 h1:3d1LTxD+Lnf3soQiD4Cp/0BRB+Rsa/+RTvz8GMMzIXs=
|
||||
github.com/unknwon/com v1.0.1/go.mod h1:tOOxU81rwgoCLoOVVPHb6T/wt8HZygqH5id+GNnlCXM=
|
||||
|
@ -578,11 +584,10 @@ golang.org/x/crypto v0.0.0-20190611184440-5c40567a22f8/go.mod h1:yigFU9vqHzYiE8U
|
|||
golang.org/x/crypto v0.0.0-20190617133340-57b3e21c3d56/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20190907121410-71b5226ff739/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20190927123631-a832865fa7ad h1:5E5raQxcv+6CZ11RrBYQe5WRbUIWpScjh0kvHZkZIrQ=
|
||||
golang.org/x/crypto v0.0.0-20190927123631-a832865fa7ad/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20191227163750-53104e6ec876 h1:sKJQZMuxjOAR/Uo2LBfU90onWEf1dF4C+0hPJCc9Mpc=
|
||||
golang.org/x/crypto v0.0.0-20191227163750-53104e6ec876/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073 h1:xMPOj6Pz6UipU1wXLkrtqpHbR0AVFnyPEQq/wRWz9lM=
|
||||
golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
|
||||
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
|
||||
|
@ -614,8 +619,8 @@ golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLL
|
|||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20191101175033-0deb6923b6d9 h1:DPz9iiH3YoKiKhX/ijjoZvT0VFwK2c6CWYWQ7Zyr8TU=
|
||||
golang.org/x/net v0.0.0-20191101175033-0deb6923b6d9/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20200301022130-244492dfa37a h1:GuSPYbZzB5/dcLNCwLQLsg3obCJtX9IJhpXkvY7kzk0=
|
||||
golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/oauth2 v0.0.0-20180620175406-ef147856a6dd/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
||||
|
@ -645,15 +650,13 @@ golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7w
|
|||
golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190616124812-15dcb6c0061f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190730183949-1393eb018365/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190801041406-cbf593c0f2f3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190907184412-d223b2b6db03/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20191010194322-b09406accb47 h1:/XfQ9z7ib8eEJX2hdgFTZJ/ntt0swNk5oYBziWeTCvY=
|
||||
golang.org/x/sys v0.0.0-20191010194322-b09406accb47/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20191127021746-63cb32ae39b2 h1:/J2nHFg1MTqaRLFO7M+J78ASNsJoz3r0cvHBPQ77fsE=
|
||||
golang.org/x/sys v0.0.0-20191127021746-63cb32ae39b2/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527 h1:uYVVQ9WP/Ds2ROhcaGPeIdVq0RIXVLwsHlnvJ+cT1So=
|
||||
golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs=
|
||||
|
@ -675,7 +678,6 @@ golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgw
|
|||
golang.org/x/tools v0.0.0-20190614205625-5aca471b1d59/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
|
||||
golang.org/x/tools v0.0.0-20190617190820-da514acc4774/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
|
||||
golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
|
||||
golang.org/x/tools v0.0.0-20190729092621-ff9f1409240a/go.mod h1:jcCCGcm9btYwXyDqrUWc6MKQKKGJCWEQ3AfLSRIbEuI=
|
||||
golang.org/x/tools v0.0.0-20190907020128-2ca718005c18/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20191213221258-04c2e8eff935 h1:kJQZhwFzSwJS2BxboKjdZzWczQOZx8VuH7Y8hhuGUtM=
|
||||
golang.org/x/tools v0.0.0-20191213221258-04c2e8eff935/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
|
||||
|
@ -715,8 +717,9 @@ gopkg.in/asn1-ber.v1 v1.0.0-20150924051756-4e86f4367175 h1:nn6Zav2sOQHCFJHEspya8
|
|||
gopkg.in/asn1-ber.v1 v1.0.0-20150924051756-4e86f4367175/go.mod h1:cuepJuh7vyXfUyUwEgHQXw849cJrilpS5NeIjOWESAw=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo=
|
||||
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU=
|
||||
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/fsnotify.v1 v1.4.7 h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4=
|
||||
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
|
||||
gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df h1:n7WqCuqOuCbNr617RXOY0AWRXxgwEyPp2z+p0+hgMuE=
|
||||
|
@ -731,12 +734,6 @@ gopkg.in/ldap.v3 v3.0.2 h1:R6RBtabK6e1GO0eQKtkyOFbAHO73QesLzI2w2DZ6b9w=
|
|||
gopkg.in/ldap.v3 v3.0.2/go.mod h1:oxD7NyBuxchC+SgJDE1Q5Od05eGt29SDQVBmV+HYbzw=
|
||||
gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo=
|
||||
gopkg.in/square/go-jose.v2 v2.3.1/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI=
|
||||
gopkg.in/src-d/go-billy.v4 v4.3.2 h1:0SQA1pRztfTFx2miS8sA97XvooFeNOmvUenF4o0EcVg=
|
||||
gopkg.in/src-d/go-billy.v4 v4.3.2/go.mod h1:nDjArDMp+XMs1aFAESLRjfGSgfvoYN0hDfzEk0GjC98=
|
||||
gopkg.in/src-d/go-git-fixtures.v3 v3.5.0 h1:ivZFOIltbce2Mo8IjzUHAFoq/IylO9WHhNOAJK+LsJg=
|
||||
gopkg.in/src-d/go-git-fixtures.v3 v3.5.0/go.mod h1:dLBcvytrw/TYZsNTWCnkNF2DSIlzWYqTe3rJR56Ac7g=
|
||||
gopkg.in/src-d/go-git.v4 v4.13.1 h1:SRtFyV8Kxc0UP7aCHcijOMQGPxHSmMOPrzulQWolkYE=
|
||||
gopkg.in/src-d/go-git.v4 v4.13.1/go.mod h1:nx5NYcxdKxq5fpltdHnPa2Exj4Sx0EclMWZQbYDu2z8=
|
||||
gopkg.in/testfixtures.v2 v2.5.0 h1:N08B7l2GzFQenyYbzqthDnKAA+cmb17iAZhhFxr7JHw=
|
||||
gopkg.in/testfixtures.v2 v2.5.0/go.mod h1:vyAq+MYCgNpR29qitQdLZhdbLFf4mR/2MFJRFoQZZ2M=
|
||||
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
|
||||
|
@ -745,8 +742,9 @@ gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME=
|
|||
gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI=
|
||||
gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74=
|
||||
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw=
|
||||
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.2.4 h1:/eiJrUcujPVeJ3xlSWaiNi3uSVmDGBK1pDHUHAnao1I=
|
||||
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||
honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||
|
@ -760,7 +758,6 @@ xorm.io/builder v0.3.6 h1:ha28mQ2M+TFx96Hxo+iq6tQgnkC9IZkM6D8w9sKHHF8=
|
|||
xorm.io/builder v0.3.6/go.mod h1:LEFAPISnRzG+zxaxj2vPicRwz67BdhFreKg8yv8/TgU=
|
||||
xorm.io/core v0.7.2 h1:mEO22A2Z7a3fPaZMk6gKL/jMD80iiyNwRrX5HOv3XLw=
|
||||
xorm.io/core v0.7.2/go.mod h1:jJfd0UAEzZ4t87nbQYtVjmqpIODugN6PD2D9E+dJvdM=
|
||||
xorm.io/xorm v0.8.0 h1:iALxgJrX8O00f8Jk22GbZwPmxJNgssV5Mv4uc2HL9PM=
|
||||
xorm.io/xorm v0.8.0/go.mod h1:ZkJLEYLoVyg7amJK/5r779bHyzs2AU8f8VMiP6BM7uY=
|
||||
xorm.io/xorm v0.8.1 h1:4f2KXuQxVdaX3RdI3Fw81NzMiSpZeyCZt8m3sEVeIkQ=
|
||||
xorm.io/xorm v0.8.1/go.mod h1:ZkJLEYLoVyg7amJK/5r779bHyzs2AU8f8VMiP6BM7uY=
|
||||
|
|
|
@ -28,6 +28,8 @@ func testAPIGetBranch(t *testing.T, branchName string, exists bool) {
|
|||
var branch api.Branch
|
||||
DecodeJSON(t, resp, &branch)
|
||||
assert.EqualValues(t, branchName, branch.Name)
|
||||
assert.True(t, branch.UserCanPush)
|
||||
assert.True(t, branch.UserCanMerge)
|
||||
}
|
||||
|
||||
func TestAPIGetBranch(t *testing.T) {
|
||||
|
|
|
@ -0,0 +1,47 @@
|
|||
// Copyright 2020 The Gitea Authors. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package integrations
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/http"
|
||||
"testing"
|
||||
|
||||
"code.gitea.io/gitea/models"
|
||||
"code.gitea.io/gitea/modules/structs"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestAPIIssuesMilestone(t *testing.T) {
|
||||
defer prepareTestEnv(t)()
|
||||
|
||||
milestone := models.AssertExistsAndLoadBean(t, &models.Milestone{ID: 1}).(*models.Milestone)
|
||||
repo := models.AssertExistsAndLoadBean(t, &models.Repository{ID: milestone.RepoID}).(*models.Repository)
|
||||
owner := models.AssertExistsAndLoadBean(t, &models.User{ID: repo.OwnerID}).(*models.User)
|
||||
assert.Equal(t, int64(1), int64(milestone.NumIssues))
|
||||
assert.Equal(t, structs.StateOpen, milestone.State())
|
||||
|
||||
session := loginUser(t, owner.Name)
|
||||
token := getTokenForLoggedInUser(t, session)
|
||||
|
||||
// update values of issue
|
||||
milestoneState := "closed"
|
||||
|
||||
urlStr := fmt.Sprintf("/api/v1/repos/%s/%s/milestones/%d?token=%s", owner.Name, repo.Name, milestone.ID, token)
|
||||
req := NewRequestWithJSON(t, "PATCH", urlStr, structs.EditMilestoneOption{
|
||||
State: &milestoneState,
|
||||
})
|
||||
resp := session.MakeRequest(t, req, http.StatusOK)
|
||||
var apiMilestone structs.Milestone
|
||||
DecodeJSON(t, resp, &apiMilestone)
|
||||
assert.EqualValues(t, "closed", apiMilestone.State)
|
||||
|
||||
req = NewRequest(t, "GET", urlStr)
|
||||
resp = session.MakeRequest(t, req, http.StatusOK)
|
||||
var apiMilestone2 structs.Milestone
|
||||
DecodeJSON(t, resp, &apiMilestone2)
|
||||
assert.EqualValues(t, "closed", apiMilestone2.State)
|
||||
}
|
|
@@ -7,6 +7,7 @@ package integrations
import (
"fmt"
"net/http"
"net/url"
"testing"

"code.gitea.io/gitea/models"

@@ -38,12 +39,12 @@ func TestAPICreateIssue(t *testing.T) {
defer prepareTestEnv(t)()
const body, title = "apiTestBody", "apiTestTitle"

repo := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 1}).(*models.Repository)
owner := models.AssertExistsAndLoadBean(t, &models.User{ID: repo.OwnerID}).(*models.User)
repoBefore := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 1}).(*models.Repository)
owner := models.AssertExistsAndLoadBean(t, &models.User{ID: repoBefore.OwnerID}).(*models.User)

session := loginUser(t, owner.Name)
token := getTokenForLoggedInUser(t, session)
urlStr := fmt.Sprintf("/api/v1/repos/%s/%s/issues?state=all&token=%s", owner.Name, repo.Name, token)
urlStr := fmt.Sprintf("/api/v1/repos/%s/%s/issues?state=all&token=%s", owner.Name, repoBefore.Name, token)
req := NewRequestWithJSON(t, "POST", urlStr, &api.CreateIssueOption{
Body: body,
Title: title,

@@ -56,19 +57,23 @@ func TestAPICreateIssue(t *testing.T) {
assert.Equal(t, apiIssue.Title, title)

models.AssertExistsAndLoadBean(t, &models.Issue{
RepoID: repo.ID,
RepoID: repoBefore.ID,
AssigneeID: owner.ID,
Content: body,
Title: title,
})

repoAfter := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 1}).(*models.Repository)
assert.Equal(t, repoBefore.NumIssues+1, repoAfter.NumIssues)
assert.Equal(t, repoBefore.NumClosedIssues, repoAfter.NumClosedIssues)
}

func TestAPIEditIssue(t *testing.T) {
defer prepareTestEnv(t)()

issueBefore := models.AssertExistsAndLoadBean(t, &models.Issue{ID: 10}).(*models.Issue)
repo := models.AssertExistsAndLoadBean(t, &models.Repository{ID: issueBefore.RepoID}).(*models.Repository)
owner := models.AssertExistsAndLoadBean(t, &models.User{ID: repo.OwnerID}).(*models.User)
repoBefore := models.AssertExistsAndLoadBean(t, &models.Repository{ID: issueBefore.RepoID}).(*models.Repository)
owner := models.AssertExistsAndLoadBean(t, &models.User{ID: repoBefore.OwnerID}).(*models.User)
assert.NoError(t, issueBefore.LoadAttributes())
assert.Equal(t, int64(1019307200), int64(issueBefore.DeadlineUnix))
assert.Equal(t, api.StateOpen, issueBefore.State())

@@ -83,7 +88,7 @@ func TestAPIEditIssue(t *testing.T) {
body := "new content!"
title := "new title from api set"

urlStr := fmt.Sprintf("/api/v1/repos/%s/%s/issues/%d?token=%s", owner.Name, repo.Name, issueBefore.Index, token)
urlStr := fmt.Sprintf("/api/v1/repos/%s/%s/issues/%d?token=%s", owner.Name, repoBefore.Name, issueBefore.Index, token)
req := NewRequestWithJSON(t, "PATCH", urlStr, api.EditIssueOption{
State: &issueState,
RemoveDeadline: &removeDeadline,

@@ -98,6 +103,7 @@ func TestAPIEditIssue(t *testing.T) {
DecodeJSON(t, resp, &apiIssue)

issueAfter := models.AssertExistsAndLoadBean(t, &models.Issue{ID: 10}).(*models.Issue)
repoAfter := models.AssertExistsAndLoadBean(t, &models.Repository{ID: issueBefore.RepoID}).(*models.Repository)

// check deleted user
assert.Equal(t, int64(500), issueAfter.PosterID)

@@ -106,6 +112,9 @@ func TestAPIEditIssue(t *testing.T) {
assert.Equal(t, int64(-1), issueBefore.PosterID)
assert.Equal(t, int64(-1), apiIssue.Poster.ID)

// check repo change
assert.Equal(t, repoBefore.NumClosedIssues+1, repoAfter.NumClosedIssues)

// API response
assert.Equal(t, api.StateClosed, apiIssue.State)
assert.Equal(t, milestone, apiIssue.Milestone.ID)

@@ -120,3 +129,47 @@ func TestAPIEditIssue(t *testing.T) {
assert.Equal(t, body, issueAfter.Content)
assert.Equal(t, title, issueAfter.Title)
}

func TestAPISearchIssue(t *testing.T) {
defer prepareTestEnv(t)()

session := loginUser(t, "user2")
token := getTokenForLoggedInUser(t, session)

link, _ := url.Parse("/api/v1/repos/issues/search")
req := NewRequest(t, "GET", link.String())
resp := session.MakeRequest(t, req, http.StatusOK)
var apiIssues []*api.Issue
DecodeJSON(t, resp, &apiIssues)

assert.Len(t, apiIssues, 8)

query := url.Values{}
query.Add("token", token)
link.RawQuery = query.Encode()
req = NewRequest(t, "GET", link.String())
resp = session.MakeRequest(t, req, http.StatusOK)
DecodeJSON(t, resp, &apiIssues)
assert.Len(t, apiIssues, 8)

query.Add("state", "closed")
link.RawQuery = query.Encode()
req = NewRequest(t, "GET", link.String())
resp = session.MakeRequest(t, req, http.StatusOK)
DecodeJSON(t, resp, &apiIssues)
assert.Len(t, apiIssues, 2)

query.Set("state", "all")
link.RawQuery = query.Encode()
req = NewRequest(t, "GET", link.String())
resp = session.MakeRequest(t, req, http.StatusOK)
DecodeJSON(t, resp, &apiIssues)
assert.Len(t, apiIssues, 10) //there are more but 10 is page item limit

query.Add("page", "2")
link.RawQuery = query.Encode()
req = NewRequest(t, "GET", link.String())
resp = session.MakeRequest(t, req, http.StatusOK)
DecodeJSON(t, resp, &apiIssues)
assert.Len(t, apiIssues, 0)
}

@@ -60,17 +60,17 @@ func TestAPIDeleteTrackedTime(t *testing.T) {
//Deletion not allowed
req := NewRequestf(t, "DELETE", "/api/v1/repos/%s/%s/issues/%d/times/%d?token=%s", user2.Name, issue2.Repo.Name, issue2.Index, time6.ID, token)
session.MakeRequest(t, req, http.StatusForbidden)
/* Delete own time <-- ToDo: timout without reason

time3 := models.AssertExistsAndLoadBean(t, &models.TrackedTime{ID: 3}).(*models.TrackedTime)
req = NewRequestf(t, "DELETE", "/api/v1/repos/%s/%s/issues/%d/times/%d?token=%s", user2.Name, issue2.Repo.Name, issue2.Index, time3.ID, token)
session.MakeRequest(t, req, http.StatusNoContent)
//Delete non existing time
session.MakeRequest(t, req, http.StatusInternalServerError) */
session.MakeRequest(t, req, http.StatusNotFound)

//Reset time of user 2 on issue 2
trackedSeconds, err := models.GetTrackedSeconds(models.FindTrackedTimesOptions{IssueID: 2, UserID: 2})
assert.NoError(t, err)
assert.Equal(t, int64(3662), trackedSeconds)
assert.Equal(t, int64(3661), trackedSeconds)

req = NewRequestf(t, "DELETE", "/api/v1/repos/%s/%s/issues/%d/times?token=%s", user2.Name, issue2.Repo.Name, issue2.Index, token)
session.MakeRequest(t, req, http.StatusNoContent)

@@ -209,13 +209,31 @@ func getRepo(t *testing.T, repoID int64) *models.Repository {
func TestAPIViewRepo(t *testing.T) {
defer prepareTestEnv(t)()

var repo api.Repository

req := NewRequest(t, "GET", "/api/v1/repos/user2/repo1")
resp := MakeRequest(t, req, http.StatusOK)

var repo api.Repository
DecodeJSON(t, resp, &repo)
assert.EqualValues(t, 1, repo.ID)
assert.EqualValues(t, "repo1", repo.Name)
assert.EqualValues(t, 1, repo.Releases)
assert.EqualValues(t, 1, repo.OpenIssues)
assert.EqualValues(t, 2, repo.OpenPulls)

req = NewRequest(t, "GET", "/api/v1/repos/user12/repo10")
resp = MakeRequest(t, req, http.StatusOK)
DecodeJSON(t, resp, &repo)
assert.EqualValues(t, 10, repo.ID)
assert.EqualValues(t, "repo10", repo.Name)
assert.EqualValues(t, 1, repo.OpenPulls)
assert.EqualValues(t, 1, repo.Forks)

req = NewRequest(t, "GET", "/api/v1/repos/user5/repo4")
resp = MakeRequest(t, req, http.StatusOK)
DecodeJSON(t, resp, &repo)
assert.EqualValues(t, 4, repo.ID)
assert.EqualValues(t, "repo4", repo.Name)
assert.EqualValues(t, 1, repo.Stars)
}

func TestAPIOrgRepos(t *testing.T) {

@ -71,7 +71,6 @@ func testGit(t *testing.T, u *url.URL) {
|
|||
t.Run("BranchProtectMerge", doBranchProtectPRMerge(&httpContext, dstPath))
|
||||
t.Run("MergeFork", func(t *testing.T) {
|
||||
t.Run("CreatePRAndMerge", doMergeFork(httpContext, forkedUserCtx, "master", httpContext.Username+":master"))
|
||||
t.Run("DeleteRepository", doAPIDeleteRepository(httpContext))
|
||||
rawTest(t, &forkedUserCtx, little, big, littleLFS, bigLFS)
|
||||
mediaTest(t, &forkedUserCtx, little, big, littleLFS, bigLFS)
|
||||
})
|
||||
|
@ -111,7 +110,6 @@ func testGit(t *testing.T, u *url.URL) {
|
|||
t.Run("BranchProtectMerge", doBranchProtectPRMerge(&sshContext, dstPath))
|
||||
t.Run("MergeFork", func(t *testing.T) {
|
||||
t.Run("CreatePRAndMerge", doMergeFork(sshContext, forkedUserCtx, "master", sshContext.Username+":master"))
|
||||
t.Run("DeleteRepository", doAPIDeleteRepository(sshContext))
|
||||
rawTest(t, &forkedUserCtx, little, big, littleLFS, bigLFS)
|
||||
mediaTest(t, &forkedUserCtx, little, big, littleLFS, bigLFS)
|
||||
})
|
||||
|
@ -351,6 +349,17 @@ func doBranchProtectPRMerge(baseCtx *APITestContext, dstPath string) func(t *tes
|
|||
pr, err = doAPICreatePullRequest(ctx, baseCtx.Username, baseCtx.Reponame, "protected", "unprotected")(t)
|
||||
assert.NoError(t, err)
|
||||
})
|
||||
t.Run("GenerateCommit", func(t *testing.T) {
|
||||
_, err := generateCommitWithNewData(littleSize, dstPath, "user2@example.com", "User Two", "branch-data-file-")
|
||||
assert.NoError(t, err)
|
||||
})
|
||||
t.Run("PushToUnprotectedBranch", doGitPushTestRepository(dstPath, "origin", "protected:unprotected-2"))
|
||||
var pr2 api.PullRequest
|
||||
t.Run("CreatePullRequest", func(t *testing.T) {
|
||||
pr2, err = doAPICreatePullRequest(ctx, baseCtx.Username, baseCtx.Reponame, "unprotected", "unprotected-2")(t)
|
||||
assert.NoError(t, err)
|
||||
})
|
||||
t.Run("MergePR2", doAPIMergePullRequest(ctx, baseCtx.Username, baseCtx.Reponame, pr2.Index))
|
||||
t.Run("MergePR", doAPIMergePullRequest(ctx, baseCtx.Username, baseCtx.Reponame, pr.Index))
|
||||
t.Run("PullProtected", doGitPull(dstPath, "origin", "protected"))
|
||||
t.Run("ProtectProtectedBranchWhitelist", doProtectBranch(ctx, "protected", baseCtx.Username))
|
||||
|
@ -408,8 +417,62 @@ func doMergeFork(ctx, baseCtx APITestContext, baseBranch, headBranch string) fun
|
|||
pr, err = doAPICreatePullRequest(ctx, baseCtx.Username, baseCtx.Reponame, baseBranch, headBranch)(t)
|
||||
assert.NoError(t, err)
|
||||
})
|
||||
t.Run("EnsureCanSeePull", func(t *testing.T) {
|
||||
req := NewRequest(t, "GET", fmt.Sprintf("/%s/%s/pulls/%d", url.PathEscape(baseCtx.Username), url.PathEscape(baseCtx.Reponame), pr.Index))
|
||||
ctx.Session.MakeRequest(t, req, http.StatusOK)
|
||||
req = NewRequest(t, "GET", fmt.Sprintf("/%s/%s/pulls/%d/files", url.PathEscape(baseCtx.Username), url.PathEscape(baseCtx.Reponame), pr.Index))
|
||||
ctx.Session.MakeRequest(t, req, http.StatusOK)
|
||||
req = NewRequest(t, "GET", fmt.Sprintf("/%s/%s/pulls/%d/commits", url.PathEscape(baseCtx.Username), url.PathEscape(baseCtx.Reponame), pr.Index))
|
||||
ctx.Session.MakeRequest(t, req, http.StatusOK)
|
||||
})
|
||||
var diffStr string
|
||||
t.Run("GetDiff", func(t *testing.T) {
|
||||
req := NewRequest(t, "GET", fmt.Sprintf("/%s/%s/pulls/%d.diff", url.PathEscape(baseCtx.Username), url.PathEscape(baseCtx.Reponame), pr.Index))
|
||||
resp := ctx.Session.MakeRequest(t, req, http.StatusOK)
|
||||
diffStr = resp.Body.String()
|
||||
})
|
||||
t.Run("MergePR", doAPIMergePullRequest(baseCtx, baseCtx.Username, baseCtx.Reponame, pr.Index))
|
||||
|
||||
t.Run("EnsureCanSeePull", func(t *testing.T) {
|
||||
req := NewRequest(t, "GET", fmt.Sprintf("/%s/%s/pulls/%d", url.PathEscape(baseCtx.Username), url.PathEscape(baseCtx.Reponame), pr.Index))
|
||||
ctx.Session.MakeRequest(t, req, http.StatusOK)
|
||||
req = NewRequest(t, "GET", fmt.Sprintf("/%s/%s/pulls/%d/files", url.PathEscape(baseCtx.Username), url.PathEscape(baseCtx.Reponame), pr.Index))
|
||||
ctx.Session.MakeRequest(t, req, http.StatusOK)
|
||||
req = NewRequest(t, "GET", fmt.Sprintf("/%s/%s/pulls/%d/commits", url.PathEscape(baseCtx.Username), url.PathEscape(baseCtx.Reponame), pr.Index))
|
||||
ctx.Session.MakeRequest(t, req, http.StatusOK)
|
||||
})
|
||||
t.Run("EnsureDiffNoChange", func(t *testing.T) {
|
||||
req := NewRequest(t, "GET", fmt.Sprintf("/%s/%s/pulls/%d.diff", url.PathEscape(baseCtx.Username), url.PathEscape(baseCtx.Reponame), pr.Index))
|
||||
resp := ctx.Session.MakeRequest(t, req, http.StatusOK)
|
||||
assert.Equal(t, diffStr, resp.Body.String())
|
||||
})
|
||||
t.Run("DeleteHeadBranch", doBranchDelete(baseCtx, baseCtx.Username, baseCtx.Reponame, headBranch))
|
||||
t.Run("EnsureCanSeePull", func(t *testing.T) {
|
||||
req := NewRequest(t, "GET", fmt.Sprintf("/%s/%s/pulls/%d", url.PathEscape(baseCtx.Username), url.PathEscape(baseCtx.Reponame), pr.Index))
|
||||
ctx.Session.MakeRequest(t, req, http.StatusOK)
|
||||
req = NewRequest(t, "GET", fmt.Sprintf("/%s/%s/pulls/%d/files", url.PathEscape(baseCtx.Username), url.PathEscape(baseCtx.Reponame), pr.Index))
|
||||
ctx.Session.MakeRequest(t, req, http.StatusOK)
|
||||
req = NewRequest(t, "GET", fmt.Sprintf("/%s/%s/pulls/%d/commits", url.PathEscape(baseCtx.Username), url.PathEscape(baseCtx.Reponame), pr.Index))
|
||||
ctx.Session.MakeRequest(t, req, http.StatusOK)
|
||||
})
|
||||
t.Run("EnsureDiffNoChange", func(t *testing.T) {
|
||||
req := NewRequest(t, "GET", fmt.Sprintf("/%s/%s/pulls/%d.diff", url.PathEscape(baseCtx.Username), url.PathEscape(baseCtx.Reponame), pr.Index))
|
||||
resp := ctx.Session.MakeRequest(t, req, http.StatusOK)
|
||||
assert.Equal(t, diffStr, resp.Body.String())
|
||||
})
|
||||
t.Run("DeleteRepository", doAPIDeleteRepository(ctx))
|
||||
t.Run("EnsureCanSeePull", func(t *testing.T) {
|
||||
req := NewRequest(t, "GET", fmt.Sprintf("/%s/%s/pulls/%d", url.PathEscape(baseCtx.Username), url.PathEscape(baseCtx.Reponame), pr.Index))
|
||||
ctx.Session.MakeRequest(t, req, http.StatusOK)
|
||||
req = NewRequest(t, "GET", fmt.Sprintf("/%s/%s/pulls/%d/files", url.PathEscape(baseCtx.Username), url.PathEscape(baseCtx.Reponame), pr.Index))
|
||||
ctx.Session.MakeRequest(t, req, http.StatusOK)
|
||||
req = NewRequest(t, "GET", fmt.Sprintf("/%s/%s/pulls/%d/commits", url.PathEscape(baseCtx.Username), url.PathEscape(baseCtx.Reponame), pr.Index))
|
||||
ctx.Session.MakeRequest(t, req, http.StatusOK)
|
||||
})
|
||||
t.Run("EnsureDiffNoChange", func(t *testing.T) {
|
||||
req := NewRequest(t, "GET", fmt.Sprintf("/%s/%s/pulls/%d.diff", url.PathEscape(baseCtx.Username), url.PathEscape(baseCtx.Reponame), pr.Index))
|
||||
resp := ctx.Session.MakeRequest(t, req, http.StatusOK)
|
||||
assert.Equal(t, diffStr, resp.Body.String())
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -422,6 +485,9 @@ func doPushCreate(ctx APITestContext, u *url.URL) func(t *testing.T) {
|
|||
tmpDir, err := ioutil.TempDir("", ctx.Reponame)
|
||||
assert.NoError(t, err)
|
||||
|
||||
_, err = git.NewCommand("clone", u.String()).RunInDir(tmpDir)
|
||||
assert.Error(t, err)
|
||||
|
||||
err = git.InitRepository(tmpDir, false)
|
||||
assert.NoError(t, err)
|
||||
|
||||
|
@ -449,6 +515,13 @@ func doPushCreate(ctx APITestContext, u *url.URL) func(t *testing.T) {
|
|||
_, err = git.NewCommand("remote", "add", "origin", u.String()).RunInDir(tmpDir)
|
||||
assert.NoError(t, err)
|
||||
|
||||
invalidCtx := ctx
|
||||
invalidCtx.Reponame = fmt.Sprintf("invalid/repo-tmp-push-create-%s", u.Scheme)
|
||||
u.Path = invalidCtx.GitPath()
|
||||
|
||||
_, err = git.NewCommand("remote", "add", "invalid", u.String()).RunInDir(tmpDir)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Push to create disabled
|
||||
setting.Repository.EnablePushCreateUser = false
|
||||
_, err = git.NewCommand("push", "origin", "master").RunInDir(tmpDir)
|
||||
|
@ -456,6 +529,12 @@ func doPushCreate(ctx APITestContext, u *url.URL) func(t *testing.T) {
|
|||
|
||||
// Push to create enabled
|
||||
setting.Repository.EnablePushCreateUser = true
|
||||
|
||||
// Invalid repo
|
||||
_, err = git.NewCommand("push", "invalid", "master").RunInDir(tmpDir)
|
||||
assert.Error(t, err)
|
||||
|
||||
// Valid repo
|
||||
_, err = git.NewCommand("push", "origin", "master").RunInDir(tmpDir)
|
||||
assert.NoError(t, err)
|
||||
|
||||
|
@ -466,3 +545,14 @@ func doPushCreate(ctx APITestContext, u *url.URL) func(t *testing.T) {
|
|||
assert.True(t, repo.IsPrivate)
|
||||
}
|
||||
}
|
||||
|
||||
func doBranchDelete(ctx APITestContext, owner, repo, branch string) func(*testing.T) {
|
||||
return func(t *testing.T) {
|
||||
csrf := GetCSRF(t, ctx.Session, fmt.Sprintf("/%s/%s/branches", url.PathEscape(owner), url.PathEscape(repo)))
|
||||
|
||||
req := NewRequestWithValues(t, "POST", fmt.Sprintf("/%s/%s/branches/delete?name=%s", url.PathEscape(owner), url.PathEscape(repo), url.QueryEscape(branch)), map[string]string{
|
||||
"_csrf": csrf,
|
||||
})
|
||||
ctx.Session.MakeRequest(t, req, http.StatusOK)
|
||||
}
|
||||
}
|
||||
|
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
@@ -1 +1 @@
0cf15c3f66ec8384480ed9c3cf87c9e97fbb0ec3
423313fbd38093bb10d0c8387db9105409c6f196

@@ -126,7 +126,7 @@ func restoreOldDB(t *testing.T, version string) bool {
err := os.MkdirAll(path.Dir(setting.Database.Path), os.ModePerm)
assert.NoError(t, err)

db, err := sql.Open("sqlite3", fmt.Sprintf("file:%s?cache=shared&mode=rwc&_busy_timeout=%d", setting.Database.Path, setting.Database.Timeout))
db, err := sql.Open("sqlite3", fmt.Sprintf("file:%s?cache=shared&mode=rwc&_busy_timeout=%d&_txlock=immediate", setting.Database.Path, setting.Database.Timeout))
assert.NoError(t, err)
defer db.Close()

@ -76,6 +76,53 @@ nARUPZ9SqaUmRm+KGsSyoYnvN9apiDk5KVQoyfrmweNN7DCIIcoh/B9Ax8nmouKz
|
|||
yBB2fjCM/bJNtN/AsgYbZIScuYK/xqTkwNtbe5WdCyD/QJOHTsPJzx59hgSVo6gf
|
||||
Fe8VBnxHtrY8gPSUU3gkhYLvLzyVX+YLNzRcffobd8gJbfumwFJUkz91oGvYz7xg
|
||||
XN2qmsgBNCbTIzWZMpRDMAbY+n2QFImGf+EJZlMdj6gOrIYq8N4+nMW1FwJivsOb
|
||||
muqySyjZnD2AYjEA6OYPXfCVhaB5fTfhQXbIrZbgsEh4ob/eIdM=
|
||||
=oSDR
|
||||
muqySyjZnD2AYjEA6OYPXfCVhaB5fTfhQXbIrZbgsEh4ob/eIdOdBVgEXta5egEM
|
||||
AMYlmZ47NqBMBeaN0o/ahYMe8eIMaroWkufMfC9VRBSMAkpbDl34oNp0cflmnMYo
|
||||
AFAl8ucRMFTiUnjiWpo27q14tjSyDVsn/CqwbnrgJgCFNV/MGsYsToEkb4JwDIRC
|
||||
bky+1BvqvI8RMlO3MlwzrlIaMrlQfx5NtUb9TyO7S4xZTz864+Ty5p3HhRwbdZMe
|
||||
Ko8sfXFhCcCHFXosI0mX83EyzsrXlbkGRawId7jvrdOAUg/cYP8f/XmV6z1NHHH9
|
||||
cvz+3oLOGuVxUdG0KuS/jigHrLWdRuKM3xfEeesp870yZU3AbyFdoHnGXROJePTl
|
||||
FV8j2P5Ahf/yuVhjdyJSKdZC2h6+HtLG9RiGgLviLLYhtlZG2H6pYyKY5Ud3php+
|
||||
qw1aYL1xtdxrHYkQlAa0vLY/mwpuPfMke9I+rtnrwlLRMCstdiN34ybZ4sRD+gL1
|
||||
w5VIZ/aM6/Gsczd3s/T8psIi09TKPfEU2gWLMGvlDsgz+aSDdVP7XYQpNglaEPet
|
||||
PwARAQABAAv8CHg6+hnV2pblTwGTlTU7V8DO3gwMfn/QhQ/8ju66G5a7J6p/ZreQ
|
||||
nfCJnqYq4AgoW0SuqVSBbbTENF6YjixNmiSlb9iHMZ+ilms24xG0Y3lOMBYYCY3Y
|
||||
nTSNf6nXyconz31TW7jLmTdG9hpykKEKO9WFgt5UpgWe+2CAgtUoBDZyaLrVBZ2h
|
||||
te99WmziDbPQZeZPm7UQ0aX0iRBclxy4+dxjcnrcmi1mdQAM/glgs2sHbEjN7JnV
|
||||
dTOvUSN7/8ixj6I719Wx6MN6jE+BNd0ytZOun6tcDl0vamfT5fBpqbQoJMib2ggo
|
||||
+FGg9VFnzEMLqyI47LfOKUjCIhwVsxS4q9HXa2FtpO8UfRMPjDKgDZQzRTRJScrP
|
||||
s1NJ9HiM/eCHS1YjRmgroo60HygxkoLVCHp+Rz/hi0tG/ptv4q6mdnm8Mwb5JJtV
|
||||
48EvmZoNTWl9xOez1wmQn6caVHipc0qDqn/veoe8N5wdc+3hoMEXbSXqU+kx2KUa
|
||||
cVxCCVoUeURhBgDUGWtx34j1y17zE92BYhtVJTCU89dDe4wOEqGPyCGvRtgTmZ+1
|
||||
KwWr66pij91MV9mlY+7Ue2QHUSmgav2EFGIjVes956p4/F/CJ6qaYoekirMSnmX5
|
||||
jhRt4p6RW7m4omha3LAQ+gN4Fqa4acZUywENBvv1x3v+IWbjGJGn3eBnRrP3o9P+
|
||||
QUAtyMifiRm0ZN8J767o+bzUVmscXrkh7Qml47lQfDToyRI1UZZQmP2izpwHcwbZ
|
||||
NtfkgRUdeEq4GJUGAO8o4Oebbt0ALZ54E2LHhk8xi4ofKkFBDCkUFjcqS3bJJNck
|
||||
rkhfqEkMLETNhPbiC4TRNiunI5PXOinwNPkKI8P/hfp4S49WdIvnARazCoxjZNtl
|
||||
0Cbo+F1wtOH9FZaaWzNlU2lCQ2JJ3MCpLHz+nEmdYWOIWGQu2/s7smLODVEFbYKR
|
||||
50VWVRL7mB83v1XdfMFvExdQ7i5MOX4hFvmwi/WJIKClJfhNwTrHp6Jrm9jA66RL
|
||||
+dNyPKfwcFcYrqt1gwYAruZzP7QgTYVL+cmvGtCaHY4KoR8hanbpqR4YbzzyEXwS
|
||||
ll2FUCaVSokuRAdH3+/CHF9bqog3Zvn6HYcCS/A/rHVGIU9a+7s5IbRe0Ysc2FAN
|
||||
Nm9AsC5YnuyoAjW3cJGaZLYxp2WOZcMEXZeLPFYrNz22R1nRoxnUIPRpsKICXcK0
|
||||
aC4rSMk479jc/8WprWx4d45EVG+6Gsh1AT8LVhDL9yHFrh50ss2jCe1Fnftet6DI
|
||||
V5zHcxBx4sCs91aPxxe12UiJA2wEGAEKACAWIQQ4G/p4KVUOUEVu5g5R68KXFICq
|
||||
DwUCXta5egIbAgHACRBR68KXFICqD8D0IAQZAQoAHRYhBKAm5ShdO9gmF/o8jan0
|
||||
RkmWoKbKBQJe1rl6AAoJEKn0RkmWoKbKacUL/3YYKmiVvcr5LYFzMdwdahkla+6m
|
||||
hEEkL0l3dJNuU97Ou71tA1ieF0fjbVRSWjXKsntKwhyPoXjaZEZwMmv7iZ8BXV+b
|
||||
oO/EG5sg2/6iukJFXZqGnQwMdLVo1jPoXDteZU1qYiCoxLHhGhHL7ivtD1ygEi6w
|
||||
/cMbbOEB5Le1vOWIwqazs8dDcAYyy1PKthRl0ygvh8CpqPwy+AK3uLm0TVwetQAp
|
||||
taux0bDYWCb5Aft1r1nlV44gU4RiC131TDo+TKd754+UuI+UHk1D+LjTmZxRX2S6
|
||||
fXgoMXzrWmthGPdqvVOgKWm7Ef18hmaBECvPnp/tUJeDVVe02KrYQi8Bf2kxveSd
|
||||
8T0N/ExcydU9HgzTL8MuyPI+yp086elQzKJu6vb9tpgxCcglQZrUNT9Uy82pzTRY
|
||||
z9MmhnCDI2SD5L/CW5PsNpPTPy7s3f9DOV0G5Vka4LTSBOCK64NvAGBmRf8rFjJU
|
||||
lPtRPhC7h6uHdUIx3Q550Xogvq5sQm8UBCsbG8OJDADT3FJSIulR9Sh96OsES3sc
|
||||
H09juN4KcbpS03MAeUFwXqw3jBMhDoGKlsjX17Jf31qh/nI/XjigS3XWyj1BLSMG
|
||||
rJfH0NyYoGDCnff37tf+8lD9km9TlnV4Qjd9ujYbDRsefhaSjLVcy/gqdxZEuNBC
|
||||
BWmGwsmLI3nyZ4KDtNsa5JUHUNNZLBN20hvmE41Eszmz4Yg9Ho9DxKiFKvzUULMc
|
||||
bnMHaVHseHHq6+NVUnN1SAcOA0ygjnEid8D57RtdBCD90LXjLB7vlR+HaSMZYOnr
|
||||
DtseivHvqqy4+rxhwV2S3avnls9vRwE4bV6GCiqhoBnWIZRrARLZc2OTBIya82vS
|
||||
BIS1eyhjif1mE7Lqhs6aPD+eqQK2mBtQ/sidN8P/IfKfVF5siXfFbuGZLz5nRIho
|
||||
Yp1z7oO3OZ09lpUk0G1h+ouIFF6goDP48M/AKtbvs9OWk3QKxnOUZD8sRncq95x6
|
||||
m4q1MVb+aJyxwBqDRGaFY+3TVArB1b+kG1JsAvV5dag=
|
||||
=511T
|
||||
-----END PGP PRIVATE KEY BLOCK-----
|
||||
|
|
|
@ -106,3 +106,57 @@ func TestPullCreate_TitleEscape(t *testing.T) {
|
|||
assert.Equal(t, "<u>XSS PR</u>", titleHTML)
|
||||
})
|
||||
}
|
||||
|
||||
func testUIDeleteBranch(t *testing.T, session *TestSession, ownerName, repoName, branchName string) {
|
||||
relURL := "/" + path.Join(ownerName, repoName, "branches")
|
||||
req := NewRequest(t, "GET", relURL)
|
||||
resp := session.MakeRequest(t, req, http.StatusOK)
|
||||
htmlDoc := NewHTMLParser(t, resp.Body)
|
||||
|
||||
req = NewRequestWithValues(t, "POST", relURL+"/delete", map[string]string{
|
||||
"_csrf": getCsrf(t, htmlDoc.doc),
|
||||
"name": branchName,
|
||||
})
|
||||
session.MakeRequest(t, req, http.StatusOK)
|
||||
}
|
||||
|
||||
func testDeleteRepository(t *testing.T, session *TestSession, ownerName, repoName string) {
|
||||
relURL := "/" + path.Join(ownerName, repoName, "settings")
|
||||
req := NewRequest(t, "GET", relURL)
|
||||
resp := session.MakeRequest(t, req, http.StatusOK)
|
||||
htmlDoc := NewHTMLParser(t, resp.Body)
|
||||
|
||||
req = NewRequestWithValues(t, "POST", relURL+"?action=delete", map[string]string{
|
||||
"_csrf": getCsrf(t, htmlDoc.doc),
|
||||
"repo_name": repoName,
|
||||
})
|
||||
session.MakeRequest(t, req, http.StatusFound)
|
||||
}
|
||||
|
||||
func TestPullBranchDelete(t *testing.T) {
|
||||
onGiteaRun(t, func(t *testing.T, u *url.URL) {
|
||||
defer prepareTestEnv(t)()
|
||||
|
||||
session := loginUser(t, "user1")
|
||||
testRepoFork(t, session, "user2", "repo1", "user1", "repo1")
|
||||
testCreateBranch(t, session, "user1", "repo1", "branch/master", "master1", http.StatusFound)
|
||||
testEditFile(t, session, "user1", "repo1", "master1", "README.md", "Hello, World (Edited)\n")
|
||||
resp := testPullCreate(t, session, "user1", "repo1", "master1", "This is a pull title")
|
||||
|
||||
// check the redirected URL
|
||||
url := resp.HeaderMap.Get("Location")
|
||||
assert.Regexp(t, "^/user2/repo1/pulls/[0-9]*$", url)
|
||||
req := NewRequest(t, "GET", url)
|
||||
session.MakeRequest(t, req, http.StatusOK)
|
||||
|
||||
// delete head branch and confirm pull page is ok
|
||||
testUIDeleteBranch(t, session, "user1", "repo1", "master1")
|
||||
req = NewRequest(t, "GET", url)
|
||||
session.MakeRequest(t, req, http.StatusOK)
|
||||
|
||||
// delete head repository and confirm pull page is ok
|
||||
testDeleteRepository(t, session, "user1", "repo1")
|
||||
req = NewRequest(t, "GET", url)
|
||||
session.MakeRequest(t, req, http.StatusOK)
|
||||
})
|
||||
}
|
||||
|
|
|
@ -105,8 +105,6 @@ func TestPullRebase(t *testing.T) {
|
|||
|
||||
func TestPullRebaseMerge(t *testing.T) {
|
||||
onGiteaRun(t, func(t *testing.T, giteaURL *url.URL) {
|
||||
defer prepareTestEnv(t)()
|
||||
|
||||
hookTasks, err := models.HookTasks(1, 1) //Retrieve previous hook number
|
||||
assert.NoError(t, err)
|
||||
hookTasksLenBefore := len(hookTasks)
|
||||
|
@ -129,8 +127,6 @@ func TestPullRebaseMerge(t *testing.T) {
|
|||
|
||||
func TestPullSquash(t *testing.T) {
|
||||
onGiteaRun(t, func(t *testing.T, giteaURL *url.URL) {
|
||||
defer prepareTestEnv(t)()
|
||||
|
||||
hookTasks, err := models.HookTasks(1, 1) //Retrieve previous hook number
|
||||
assert.NoError(t, err)
|
||||
hookTasksLenBefore := len(hookTasks)
|
||||
|
@ -154,10 +150,9 @@ func TestPullSquash(t *testing.T) {
|
|||
|
||||
func TestPullCleanUpAfterMerge(t *testing.T) {
|
||||
onGiteaRun(t, func(t *testing.T, giteaURL *url.URL) {
|
||||
defer prepareTestEnv(t)()
|
||||
session := loginUser(t, "user1")
|
||||
testRepoFork(t, session, "user2", "repo1", "user1", "repo1")
|
||||
testEditFileToNewBranch(t, session, "user1", "repo1", "master", "feature/test", "README.md", "Hello, World (Edited)\n")
|
||||
testEditFileToNewBranch(t, session, "user1", "repo1", "master", "feature/test", "README.md", "Hello, World (Edited - TestPullCleanUpAfterMerge)\n")
|
||||
|
||||
resp := testPullCreate(t, session, "user1", "repo1", "feature/test", "This is a pull title")
|
||||
|
||||
|
@ -190,7 +185,6 @@ func TestPullCleanUpAfterMerge(t *testing.T) {
|
|||
|
||||
func TestCantMergeWorkInProgress(t *testing.T) {
|
||||
onGiteaRun(t, func(t *testing.T, giteaURL *url.URL) {
|
||||
defer prepareTestEnv(t)()
|
||||
session := loginUser(t, "user1")
|
||||
testRepoFork(t, session, "user2", "repo1", "user1", "repo1")
|
||||
testEditFile(t, session, "user1", "repo1", "master", "README.md", "Hello, World (Edited)\n")
|
||||
|
@ -212,7 +206,6 @@ func TestCantMergeWorkInProgress(t *testing.T) {
|
|||
|
||||
func TestCantMergeConflict(t *testing.T) {
|
||||
onGiteaRun(t, func(t *testing.T, giteaURL *url.URL) {
|
||||
defer prepareTestEnv(t)()
|
||||
session := loginUser(t, "user1")
|
||||
testRepoFork(t, session, "user2", "repo1", "user1", "repo1")
|
||||
testEditFileToNewBranch(t, session, "user1", "repo1", "master", "conflict", "README.md", "Hello, World (Edited Once)\n")
|
||||
|
@ -258,7 +251,6 @@ func TestCantMergeConflict(t *testing.T) {
|
|||
|
||||
func TestCantMergeUnrelated(t *testing.T) {
|
||||
onGiteaRun(t, func(t *testing.T, giteaURL *url.URL) {
|
||||
defer prepareTestEnv(t)()
|
||||
session := loginUser(t, "user1")
|
||||
testRepoFork(t, session, "user2", "repo1", "user1", "repo1")
|
||||
testEditFileToNewBranch(t, session, "user1", "repo1", "master", "base", "README.md", "Hello, World (Edited Twice)\n")
|
||||
|
|
|
@@ -11,7 +11,6 @@ import (
"strings"
"testing"

"code.gitea.io/gitea/models"
api "code.gitea.io/gitea/modules/structs"

"github.com/stretchr/testify/assert"

@@ -48,20 +47,20 @@ func TestPullCreate_CommitStatus(t *testing.T) {

commitID := path.Base(commitURL)

statusList := []models.CommitStatusState{
models.CommitStatusPending,
models.CommitStatusError,
models.CommitStatusFailure,
models.CommitStatusWarning,
models.CommitStatusSuccess,
statusList := []api.CommitStatusState{
api.CommitStatusPending,
api.CommitStatusError,
api.CommitStatusFailure,
api.CommitStatusWarning,
api.CommitStatusSuccess,
}

statesIcons := map[models.CommitStatusState]string{
models.CommitStatusPending: "circle icon yellow",
models.CommitStatusSuccess: "check icon green",
models.CommitStatusError: "warning icon red",
models.CommitStatusFailure: "remove icon red",
models.CommitStatusWarning: "warning sign icon yellow",
statesIcons := map[api.CommitStatusState]string{
api.CommitStatusPending: "circle icon yellow",
api.CommitStatusSuccess: "check icon green",
api.CommitStatusError: "warning icon red",
api.CommitStatusFailure: "remove icon red",
api.CommitStatusWarning: "warning sign icon yellow",
}

// Update commit status, and check if icon is updated as well

@@ -20,7 +20,7 @@ func createNewRelease(t *testing.T, session *TestSession, repoURL, tag, title st
resp := session.MakeRequest(t, req, http.StatusOK)
htmlDoc := NewHTMLParser(t, resp.Body)

link, exists := htmlDoc.doc.Find("form").Attr("action")
link, exists := htmlDoc.doc.Find("form.ui.form").Attr("action")
assert.True(t, exists, "The template has changed")

postData := map[string]string{

@@ -14,7 +14,7 @@ func TestSignOut(t *testing.T) {

session := loginUser(t, "user2")

req := NewRequest(t, "GET", "/user/logout")
req := NewRequest(t, "POST", "/user/logout")
session.MakeRequest(t, req, http.StatusFound)

// try to view a private repo, should fail

main.go

@@ -68,6 +68,7 @@ arguments - which can alternatively be run by running the subcommand web.`
cmd.CmdMigrate,
cmd.CmdKeys,
cmd.CmdConvert,
cmd.CmdDoctor,
}
// Now adjust these commands to add our global configuration options

@@ -122,10 +122,13 @@ func (a *Action) ShortActUserName() string {
return base.EllipsisString(a.GetActUserName(), 20)
}

// GetDisplayName gets the action's display name based on DEFAULT_SHOW_FULL_NAME
// GetDisplayName gets the action's display name based on DEFAULT_SHOW_FULL_NAME, or falls back to the username if it is blank.
func (a *Action) GetDisplayName() string {
if setting.UI.DefaultShowFullName {
return a.GetActFullName()
trimmedFullName := strings.TrimSpace(a.GetActFullName())
if len(trimmedFullName) > 0 {
return trimmedFullName
}
}
return a.ShortActUserName()
}

@@ -212,7 +215,7 @@ func (a *Action) getCommentLink(e Engine) string {
return "#"
}
if a.Comment == nil && a.CommentID != 0 {
a.Comment, _ = GetCommentByID(a.CommentID)
a.Comment, _ = getCommentByID(e, a.CommentID)
}
if a.Comment != nil {
return a.Comment.HTMLURL()

@@ -432,6 +435,8 @@ func GetFeeds(opts GetFeedsOptions) ([]*Action, error) {
}

cond = cond.And(builder.In("repo_id", repoIDs))
} else {
cond = cond.And(builder.In("repo_id", AccessibleRepoIDsQuery(opts.RequestingUserID)))
}

cond = cond.And(builder.Eq{"user_id": opts.RequestedUser.ID})

@ -79,7 +79,11 @@ func (a *Attachment) LinkedRepository() (*Repository, UnitType, error) {
|
|||
return nil, UnitTypeIssues, err
|
||||
}
|
||||
repo, err := GetRepositoryByID(iss.RepoID)
|
||||
return repo, UnitTypeIssues, err
|
||||
unitType := UnitTypeIssues
|
||||
if iss.IsPull {
|
||||
unitType = UnitTypePullRequests
|
||||
}
|
||||
return repo, unitType, err
|
||||
} else if a.ReleaseID != 0 {
|
||||
rel, err := GetReleaseByID(a.ReleaseID)
|
||||
if err != nil {
|
||||
|
@ -132,9 +136,8 @@ func GetAttachmentByID(id int64) (*Attachment, error) {
|
|||
}
|
||||
|
||||
func getAttachmentByID(e Engine, id int64) (*Attachment, error) {
|
||||
attach := &Attachment{ID: id}
|
||||
|
||||
if has, err := e.Get(attach); err != nil {
|
||||
attach := &Attachment{}
|
||||
if has, err := e.ID(id).Get(attach); err != nil {
|
||||
return nil, err
|
||||
} else if !has {
|
||||
return nil, ErrAttachmentNotExist{ID: id, UUID: ""}
|
||||
|
@ -143,8 +146,8 @@ func getAttachmentByID(e Engine, id int64) (*Attachment, error) {
|
|||
}
|
||||
|
||||
func getAttachmentByUUID(e Engine, uuid string) (*Attachment, error) {
|
||||
attach := &Attachment{UUID: uuid}
|
||||
has, err := e.Get(attach)
|
||||
attach := &Attachment{}
|
||||
has, err := e.Where("uuid=?", uuid).Get(attach)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
} else if !has {
|
||||
|
@ -195,7 +198,7 @@ func GetAttachmentsByCommentID(commentID int64) ([]*Attachment, error) {
|
|||
|
||||
func getAttachmentsByCommentID(e Engine, commentID int64) ([]*Attachment, error) {
|
||||
attachments := make([]*Attachment, 0, 10)
|
||||
return attachments, x.Where("comment_id=?", commentID).Find(&attachments)
|
||||
return attachments, e.Where("comment_id=?", commentID).Find(&attachments)
|
||||
}
|
||||
|
||||
// getAttachmentByReleaseIDFileName return a file based on the the following infos:
|
||||
|
|
|
@ -138,7 +138,7 @@ func TestLinkedRepository(t *testing.T) {
|
|||
expectedUnitType UnitType
|
||||
}{
|
||||
{"LinkedIssue", 1, &Repository{ID: 1}, UnitTypeIssues},
|
||||
{"LinkedComment", 3, &Repository{ID: 1}, UnitTypeIssues},
|
||||
{"LinkedComment", 3, &Repository{ID: 1}, UnitTypePullRequests},
|
||||
{"LinkedRelease", 9, &Repository{ID: 1}, UnitTypeReleases},
|
||||
{"Notlinked", 10, nil, -1},
|
||||
}
|
||||
|
|
|
@ -113,6 +113,28 @@ func (protectBranch *ProtectedBranch) CanUserMerge(userID int64) bool {
|
|||
return in
|
||||
}
|
||||
|
||||
// IsUserMergeWhitelisted checks if some user is whitelisted to merge to this branch
|
||||
func (protectBranch *ProtectedBranch) IsUserMergeWhitelisted(userID int64) bool {
|
||||
if !protectBranch.EnableMergeWhitelist {
|
||||
return true
|
||||
}
|
||||
|
||||
if base.Int64sContains(protectBranch.MergeWhitelistUserIDs, userID) {
|
||||
return true
|
||||
}
|
||||
|
||||
if len(protectBranch.MergeWhitelistTeamIDs) == 0 {
|
||||
return false
|
||||
}
|
||||
|
||||
in, err := IsUserInTeams(userID, protectBranch.MergeWhitelistTeamIDs)
|
||||
if err != nil {
|
||||
log.Error("IsUserInTeams: %v", err)
|
||||
return false
|
||||
}
|
||||
return in
|
||||
}
|
||||
|
||||
// IsUserOfficialReviewer check if user is official reviewer for the branch (counts towards required approvals)
|
||||
func (protectBranch *ProtectedBranch) IsUserOfficialReviewer(user *User) (bool, error) {
|
||||
return protectBranch.isUserOfficialReviewer(x, user)
|
||||
|
@ -209,8 +231,8 @@ func getProtectedBranchBy(e Engine, repoID int64, branchName string) (*Protected
|
|||
|
||||
// GetProtectedBranchByID getting protected branch by ID
|
||||
func GetProtectedBranchByID(id int64) (*ProtectedBranch, error) {
|
||||
rel := &ProtectedBranch{ID: id}
|
||||
has, err := x.Get(rel)
|
||||
rel := &ProtectedBranch{}
|
||||
has, err := x.ID(id).Get(rel)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
@ -499,9 +521,9 @@ func (repo *Repository) GetDeletedBranches() ([]*DeletedBranch, error) {
|
|||
}
|
||||
|
||||
// GetDeletedBranchByID get a deleted branch by its ID
|
||||
func (repo *Repository) GetDeletedBranchByID(ID int64) (*DeletedBranch, error) {
|
||||
deletedBranch := &DeletedBranch{ID: ID}
|
||||
has, err := x.Get(deletedBranch)
|
||||
func (repo *Repository) GetDeletedBranchByID(id int64) (*DeletedBranch, error) {
|
||||
deletedBranch := &DeletedBranch{}
|
||||
has, err := x.ID(id).Get(deletedBranch)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
|
|
@ -19,52 +19,19 @@ import (
|
|||
"xorm.io/xorm"
|
||||
)
|
||||
|
||||
// CommitStatusState holds the state of a Status
|
||||
// It can be "pending", "success", "error", "failure", and "warning"
|
||||
type CommitStatusState string
|
||||
|
||||
// IsWorseThan returns true if this State is worse than the given State
|
||||
func (css CommitStatusState) IsWorseThan(css2 CommitStatusState) bool {
|
||||
switch css {
|
||||
case CommitStatusError:
|
||||
return true
|
||||
case CommitStatusFailure:
|
||||
return css2 != CommitStatusError
|
||||
case CommitStatusWarning:
|
||||
return css2 != CommitStatusError && css2 != CommitStatusFailure
|
||||
case CommitStatusSuccess:
|
||||
return css2 != CommitStatusError && css2 != CommitStatusFailure && css2 != CommitStatusWarning
|
||||
default:
|
||||
return css2 != CommitStatusError && css2 != CommitStatusFailure && css2 != CommitStatusWarning && css2 != CommitStatusSuccess
|
||||
}
|
||||
}
|
||||
|
||||
const (
|
||||
// CommitStatusPending is for when the Status is Pending
|
||||
CommitStatusPending CommitStatusState = "pending"
|
||||
// CommitStatusSuccess is for when the Status is Success
|
||||
CommitStatusSuccess CommitStatusState = "success"
|
||||
// CommitStatusError is for when the Status is Error
|
||||
CommitStatusError CommitStatusState = "error"
|
||||
// CommitStatusFailure is for when the Status is Failure
|
||||
CommitStatusFailure CommitStatusState = "failure"
|
||||
// CommitStatusWarning is for when the Status is Warning
|
||||
CommitStatusWarning CommitStatusState = "warning"
|
||||
)
|
||||
|
||||
// CommitStatus holds a single Status of a single Commit
|
||||
type CommitStatus struct {
|
||||
ID int64 `xorm:"pk autoincr"`
|
||||
Index int64 `xorm:"INDEX UNIQUE(repo_sha_index)"`
|
||||
RepoID int64 `xorm:"INDEX UNIQUE(repo_sha_index)"`
|
||||
Repo *Repository `xorm:"-"`
|
||||
State CommitStatusState `xorm:"VARCHAR(7) NOT NULL"`
|
||||
SHA string `xorm:"VARCHAR(64) NOT NULL INDEX UNIQUE(repo_sha_index)"`
|
||||
TargetURL string `xorm:"TEXT"`
|
||||
Description string `xorm:"TEXT"`
|
||||
ContextHash string `xorm:"char(40) index"`
|
||||
Context string `xorm:"TEXT"`
|
||||
Creator *User `xorm:"-"`
|
||||
ID int64 `xorm:"pk autoincr"`
|
||||
Index int64 `xorm:"INDEX UNIQUE(repo_sha_index)"`
|
||||
RepoID int64 `xorm:"INDEX UNIQUE(repo_sha_index)"`
|
||||
Repo *Repository `xorm:"-"`
|
||||
State api.CommitStatusState `xorm:"VARCHAR(7) NOT NULL"`
|
||||
SHA string `xorm:"VARCHAR(64) NOT NULL INDEX UNIQUE(repo_sha_index)"`
|
||||
TargetURL string `xorm:"TEXT"`
|
||||
Description string `xorm:"TEXT"`
|
||||
ContextHash string `xorm:"char(40) index"`
|
||||
Context string `xorm:"TEXT"`
|
||||
Creator *User `xorm:"-"`
|
||||
CreatorID int64
|
||||
|
||||
CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
|
||||
|
@ -118,9 +85,9 @@ func (status *CommitStatus) APIFormat() *api.Status {
|
|||
// CalcCommitStatus returns commit status state via some status, the commit statues should order by id desc
|
||||
func CalcCommitStatus(statuses []*CommitStatus) *CommitStatus {
|
||||
var lastStatus *CommitStatus
|
||||
var state CommitStatusState
|
||||
var state api.CommitStatusState
|
||||
for _, status := range statuses {
|
||||
if status.State.IsWorseThan(state) {
|
||||
if status.State.NoBetterThan(state) {
|
||||
state = status.State
|
||||
lastStatus = status
|
||||
}
|
||||
|
|
|
@@ -7,6 +7,7 @@ package models
import (
"testing"

"code.gitea.io/gitea/modules/structs"
"github.com/stretchr/testify/assert"
)

@@ -23,22 +24,22 @@ func TestGetCommitStatuses(t *testing.T) {
assert.Len(t, statuses, 5)

assert.Equal(t, "ci/awesomeness", statuses[0].Context)
assert.Equal(t, CommitStatusPending, statuses[0].State)
assert.Equal(t, structs.CommitStatusPending, statuses[0].State)
assert.Equal(t, "https://try.gitea.io/api/v1/repos/user2/repo1/statuses/1234123412341234123412341234123412341234", statuses[0].APIURL())

assert.Equal(t, "cov/awesomeness", statuses[1].Context)
assert.Equal(t, CommitStatusWarning, statuses[1].State)
assert.Equal(t, structs.CommitStatusWarning, statuses[1].State)
assert.Equal(t, "https://try.gitea.io/api/v1/repos/user2/repo1/statuses/1234123412341234123412341234123412341234", statuses[1].APIURL())

assert.Equal(t, "cov/awesomeness", statuses[2].Context)
assert.Equal(t, CommitStatusSuccess, statuses[2].State)
assert.Equal(t, structs.CommitStatusSuccess, statuses[2].State)
assert.Equal(t, "https://try.gitea.io/api/v1/repos/user2/repo1/statuses/1234123412341234123412341234123412341234", statuses[2].APIURL())

assert.Equal(t, "ci/awesomeness", statuses[3].Context)
assert.Equal(t, CommitStatusFailure, statuses[3].State)
assert.Equal(t, structs.CommitStatusFailure, statuses[3].State)
assert.Equal(t, "https://try.gitea.io/api/v1/repos/user2/repo1/statuses/1234123412341234123412341234123412341234", statuses[3].APIURL())

assert.Equal(t, "deploy/awesomeness", statuses[4].Context)
assert.Equal(t, CommitStatusError, statuses[4].State)
assert.Equal(t, structs.CommitStatusError, statuses[4].State)
assert.Equal(t, "https://try.gitea.io/api/v1/repos/user2/repo1/statuses/1234123412341234123412341234123412341234", statuses[4].APIURL())
}

@@ -10,6 +10,7 @@ import (
"testing"

"github.com/stretchr/testify/assert"
"xorm.io/builder"
)

// consistencyCheckable a type that can be tested for database consistency

@@ -167,3 +168,23 @@ func (action *Action) checkForConsistency(t *testing.T) {
repo := AssertExistsAndLoadBean(t, &Repository{ID: action.RepoID}).(*Repository)
assert.Equal(t, repo.IsPrivate, action.IsPrivate, "action: %+v", action)
}

// CountOrphanedObjects count subjects with have no existing refobject anymore
func CountOrphanedObjects(subject, refobject, joinCond string) (int64, error) {
var ids []int64

return int64(len(ids)), x.Table("`"+subject+"`").
Join("LEFT", refobject, joinCond).
Where(builder.IsNull{"`" + refobject + "`.id"}).
Select("id").Find(&ids)
}

// DeleteOrphanedObjects delete subjects with have no existing refobject anymore
func DeleteOrphanedObjects(subject, refobject, joinCond string) error {
_, err := x.In("id", builder.Select("`"+subject+"`.id").
From("`"+subject+"`").
Join("LEFT", "`"+refobject+"`", joinCond).
Where(builder.IsNull{"`" + refobject + "`.id"})).
Delete("`" + subject + "`")
return err
}

@@ -4,6 +4,11 @@

package models

import (
"code.gitea.io/gitea/modules/setting"
"xorm.io/builder"
)

// DBContext represents a db context
type DBContext struct {
e Engine

@@ -53,3 +58,10 @@ func WithTx(f func(ctx DBContext) error) error {
sess.Close()
return err
}

// Iterate iterates the databases and doing something
func Iterate(ctx DBContext, tableBean interface{}, cond builder.Cond, fun func(idx int, bean interface{}) error) error {
return ctx.e.Where(cond).
BufferSize(setting.Database.IterateBufferSize).
Iterate(tableBean, fun)
}

@ -7,6 +7,7 @@ package models
|
|||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"code.gitea.io/gitea/modules/git"
|
||||
)
|
||||
|
@ -56,6 +57,21 @@ func (err ErrNamePatternNotAllowed) Error() string {
|
|||
return fmt.Sprintf("name pattern is not allowed [pattern: %s]", err.Pattern)
|
||||
}
|
||||
|
||||
// ErrNameCharsNotAllowed represents a "character not allowed in name" error.
|
||||
type ErrNameCharsNotAllowed struct {
|
||||
Name string
|
||||
}
|
||||
|
||||
// IsErrNameCharsNotAllowed checks if an error is an ErrNameCharsNotAllowed.
|
||||
func IsErrNameCharsNotAllowed(err error) bool {
|
||||
_, ok := err.(ErrNameCharsNotAllowed)
|
||||
return ok
|
||||
}
|
||||
|
||||
func (err ErrNameCharsNotAllowed) Error() string {
|
||||
return fmt.Sprintf("User name is invalid [%s]: must be valid alpha or numeric or dash(-_) or dot characters", err.Name)
|
||||
}
|
||||
|
||||
// ErrSSHDisabled represents an "SSH disabled" error.
|
||||
type ErrSSHDisabled struct {
|
||||
}
|
||||
|
@ -1355,6 +1371,53 @@ func (err ErrMergePushOutOfDate) Error() string {
|
|||
return fmt.Sprintf("Merge PushOutOfDate Error: %v: %s\n%s", err.Err, err.StdErr, err.StdOut)
|
||||
}
|
||||
|
||||
// ErrPushRejected represents an error if merging fails due to rejection from a hook
|
||||
type ErrPushRejected struct {
|
||||
Style MergeStyle
|
||||
Message string
|
||||
StdOut string
|
||||
StdErr string
|
||||
Err error
|
||||
}
|
||||
|
||||
// IsErrPushRejected checks if an error is a ErrPushRejected.
|
||||
func IsErrPushRejected(err error) bool {
|
||||
_, ok := err.(ErrPushRejected)
|
||||
return ok
|
||||
}
|
||||
|
||||
func (err ErrPushRejected) Error() string {
|
||||
return fmt.Sprintf("Merge PushRejected Error: %v: %s\n%s", err.Err, err.StdErr, err.StdOut)
|
||||
}
|
||||
|
||||
// GenerateMessage generates the remote message from the stderr
|
||||
func (err *ErrPushRejected) GenerateMessage() {
|
||||
messageBuilder := &strings.Builder{}
|
||||
i := strings.Index(err.StdErr, "remote: ")
|
||||
if i < 0 {
|
||||
err.Message = ""
|
||||
return
|
||||
}
|
||||
for {
|
||||
if len(err.StdErr) <= i+8 {
|
||||
break
|
||||
}
|
||||
if err.StdErr[i:i+8] != "remote: " {
|
||||
break
|
||||
}
|
||||
i += 8
|
||||
nl := strings.IndexByte(err.StdErr[i:], '\n')
|
||||
if nl >= 0 {
|
||||
messageBuilder.WriteString(err.StdErr[i : i+nl+1])
|
||||
i = i + nl + 1
|
||||
} else {
|
||||
messageBuilder.WriteString(err.StdErr[i:])
|
||||
i = len(err.StdErr)
|
||||
}
|
||||
}
|
||||
err.Message = strings.TrimSpace(messageBuilder.String())
|
||||
}
|
||||
|
||||
// ErrRebaseConflicts represents an error if rebase fails with a conflict
|
||||
type ErrRebaseConflicts struct {
|
||||
Style MergeStyle
|
||||
|
|
|
@ -369,6 +369,7 @@ type CommitVerification struct {
|
|||
CommittingUser *User
|
||||
SigningEmail string
|
||||
SigningKey *GPGKey
|
||||
TrustStatus string
|
||||
}
|
||||
|
||||
// SignCommit represents a commit with validation of signature.
|
||||
|
@ -735,6 +736,21 @@ func verifyWithGPGSettings(gpgSettings *git.GPGSettings, sig *packet.Signature,
|
|||
CanSign: pubkey.CanSign(),
|
||||
KeyID: pubkey.KeyIdString(),
|
||||
}
|
||||
for _, subKey := range ekey.Subkeys {
|
||||
content, err := base64EncPubKey(subKey.PublicKey)
|
||||
if err != nil {
|
||||
return &CommitVerification{
|
||||
CommittingUser: committer,
|
||||
Verified: false,
|
||||
Reason: "gpg.error.generate_hash",
|
||||
}
|
||||
}
|
||||
k.SubsKey = append(k.SubsKey, &GPGKey{
|
||||
Content: content,
|
||||
CanSign: subKey.PublicKey.CanSign(),
|
||||
KeyID: subKey.PublicKey.KeyIdString(),
|
||||
})
|
||||
}
|
||||
if commitVerification := hashAndVerifyWithSubKeys(sig, payload, k, committer, &User{
|
||||
Name: gpgSettings.Name,
|
||||
Email: gpgSettings.Email,
|
||||
|
@ -754,18 +770,54 @@ func verifyWithGPGSettings(gpgSettings *git.GPGSettings, sig *packet.Signature,
|
|||
}
|
||||
|
||||
// ParseCommitsWithSignature checks if signaute of commits are corresponding to users gpg keys.
|
||||
func ParseCommitsWithSignature(oldCommits *list.List) *list.List {
|
||||
func ParseCommitsWithSignature(oldCommits *list.List, repository *Repository) *list.List {
|
||||
var (
|
||||
newCommits = list.New()
|
||||
e = oldCommits.Front()
|
||||
)
|
||||
memberMap := map[int64]bool{}
|
||||
|
||||
for e != nil {
|
||||
c := e.Value.(UserCommit)
|
||||
newCommits.PushBack(SignCommit{
|
||||
signCommit := SignCommit{
|
||||
UserCommit: &c,
|
||||
Verification: ParseCommitWithSignature(c.Commit),
|
||||
})
|
||||
}
|
||||
|
||||
_ = CalculateTrustStatus(signCommit.Verification, repository, &memberMap)
|
||||
|
||||
newCommits.PushBack(signCommit)
|
||||
e = e.Next()
|
||||
}
|
||||
return newCommits
|
||||
}
|
||||
|
||||
// CalculateTrustStatus will calculate the TrustStatus for a commit verification within a repository
|
||||
func CalculateTrustStatus(verification *CommitVerification, repository *Repository, memberMap *map[int64]bool) (err error) {
|
||||
if verification.Verified {
|
||||
verification.TrustStatus = "trusted"
|
||||
if verification.SigningUser.ID != 0 {
|
||||
var isMember bool
|
||||
if memberMap != nil {
|
||||
var has bool
|
||||
isMember, has = (*memberMap)[verification.SigningUser.ID]
|
||||
if !has {
|
||||
isMember, err = repository.IsOwnerMemberCollaborator(verification.SigningUser.ID)
|
||||
(*memberMap)[verification.SigningUser.ID] = isMember
|
||||
}
|
||||
} else {
|
||||
isMember, err = repository.IsOwnerMemberCollaborator(verification.SigningUser.ID)
|
||||
}
|
||||
|
||||
if !isMember {
|
||||
verification.TrustStatus = "untrusted"
|
||||
if verification.CommittingUser.ID != verification.SigningUser.ID {
|
||||
// The committing user and the signing user are not the same and are not the default key
|
||||
// This should be marked as questionable unless the signing user is a collaborator/team member etc.
|
||||
verification.TrustStatus = "unmatched"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
|
|
@ -74,8 +74,8 @@ var (
|
|||
issueTasksDonePat *regexp.Regexp
|
||||
)
|
||||
|
||||
const issueTasksRegexpStr = `(^\s*[-*]\s\[[\sx]\]\s.)|(\n\s*[-*]\s\[[\sx]\]\s.)`
|
||||
const issueTasksDoneRegexpStr = `(^\s*[-*]\s\[[x]\]\s.)|(\n\s*[-*]\s\[[x]\]\s.)`
|
||||
const issueTasksRegexpStr = `(^\s*[-*]\s\[[\sxX]\]\s.)|(\n\s*[-*]\s\[[\sxX]\]\s.)`
|
||||
const issueTasksDoneRegexpStr = `(^\s*[-*]\s\[[xX]\]\s.)|(\n\s*[-*]\s\[[xX]\]\s.)`
|
||||
const issueMaxDupIndexAttempts = 3
|
||||
|
||||
func init() {
|
||||
|
@ -241,7 +241,7 @@ func (issue *Issue) loadReactions(e Engine) (err error) {
|
|||
}
|
||||
|
||||
func (issue *Issue) loadMilestone(e Engine) (err error) {
|
||||
if issue.Milestone == nil && issue.MilestoneID > 0 {
|
||||
if (issue.Milestone == nil || issue.Milestone.ID != issue.MilestoneID) && issue.MilestoneID > 0 {
|
||||
issue.Milestone, err = getMilestoneByRepoID(e, issue.RepoID, issue.MilestoneID)
|
||||
if err != nil && !IsErrMilestoneNotExist(err) {
|
||||
return fmt.Errorf("getMilestoneByRepoID [repo_id: %d, milestone_id: %d]: %v", issue.RepoID, issue.MilestoneID, err)
|
||||
|
@ -381,6 +381,7 @@ func (issue *Issue) apiFormat(e Engine) *api.Issue {
|
|||
apiIssue := &api.Issue{
|
||||
ID: issue.ID,
|
||||
URL: issue.APIURL(),
|
||||
HTMLURL: issue.HTMLURL(),
|
||||
Index: issue.Index,
|
||||
Poster: issue.Poster.APIFormat(),
|
||||
Title: issue.Title,
|
||||
|
@ -402,11 +403,12 @@ func (issue *Issue) apiFormat(e Engine) *api.Issue {
|
|||
apiIssue.Closed = issue.ClosedUnix.AsTimePtr()
|
||||
}
|
||||
|
||||
issue.loadMilestone(e)
|
||||
if issue.Milestone != nil {
|
||||
apiIssue.Milestone = issue.Milestone.APIFormat()
|
||||
}
|
||||
issue.loadAssignees(e)
|
||||
|
||||
issue.loadAssignees(e)
|
||||
if len(issue.Assignees) > 0 {
|
||||
for _, assignee := range issue.Assignees {
|
||||
apiIssue.Assignees = append(apiIssue.Assignees, assignee.APIFormat())
|
||||
|
@ -436,7 +438,7 @@ func (issue *Issue) HashTag() string {
|
|||
|
||||
// IsPoster returns true if given user by ID is the poster.
|
||||
func (issue *Issue) IsPoster(uid int64) bool {
|
||||
return issue.PosterID == uid
|
||||
return issue.OriginalAuthorID == 0 && issue.PosterID == uid
|
||||
}
|
||||
|
||||
func (issue *Issue) hasLabel(e Engine, labelID int64) bool {
|
||||
|
@ -671,6 +673,10 @@ func (issue *Issue) changeStatus(e *xorm.Session, doer *User, isClosed bool) (*C
|
|||
return nil, err
|
||||
}
|
||||
|
||||
if err := issue.updateClosedNum(e); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// New action comment
|
||||
cmtType := CommentTypeClose
|
||||
if !issue.IsClosed {
|
||||
|
@ -1334,6 +1340,36 @@ type IssueStatsOptions struct {
|
|||
|
||||
// GetIssueStats returns issue statistic information by given conditions.
|
||||
func GetIssueStats(opts *IssueStatsOptions) (*IssueStats, error) {
|
||||
if len(opts.IssueIDs) <= maxQueryParameters {
|
||||
return getIssueStatsChunk(opts, opts.IssueIDs)
|
||||
}
|
||||
|
||||
// If too long a list of IDs is provided, we get the statistics in
|
||||
// smaller chunks and get accumulates. Note: this could potentially
|
||||
// get us invalid results. The alternative is to insert the list of
|
||||
// ids in a temporary table and join from them.
|
||||
accum := &IssueStats{}
|
||||
for i := 0; i < len(opts.IssueIDs); {
|
||||
chunk := i + maxQueryParameters
|
||||
if chunk > len(opts.IssueIDs) {
|
||||
chunk = len(opts.IssueIDs)
|
||||
}
|
||||
stats, err := getIssueStatsChunk(opts, opts.IssueIDs[i:chunk])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
accum.OpenCount += stats.OpenCount
|
||||
accum.ClosedCount += stats.ClosedCount
|
||||
accum.YourRepositoriesCount += stats.YourRepositoriesCount
|
||||
accum.AssignCount += stats.AssignCount
|
||||
accum.CreateCount += stats.CreateCount
|
||||
accum.OpenCount += stats.MentionCount
|
||||
i = chunk
|
||||
}
|
||||
return accum, nil
|
||||
}
|
||||
|
||||
func getIssueStatsChunk(opts *IssueStatsOptions, issueIDs []int64) (*IssueStats, error) {
|
||||
stats := &IssueStats{}
|
||||
|
||||
countSession := func(opts *IssueStatsOptions) *xorm.Session {
|
||||
|
|
|
@@ -749,8 +749,12 @@ func CreateRefComment(doer *User, repo *Repository, issue *Issue, content, commi

// GetCommentByID returns the comment by given ID.
func GetCommentByID(id int64) (*Comment, error) {
return getCommentByID(x, id)
}

func getCommentByID(e Engine, id int64) (*Comment, error) {
c := new(Comment)
has, err := x.ID(id).Get(c)
has, err := e.ID(id).Get(c)
if err != nil {
return nil, err
} else if !has {

@@ -376,6 +376,11 @@ func (comments CommentList) loadDependentIssues(e Engine) error {
for _, comment := range comments {
if comment.DependentIssue == nil {
comment.DependentIssue = issues[comment.DependentIssueID]
if comment.DependentIssue != nil {
if err := comment.DependentIssue.loadRepo(e); err != nil {
return err
}
}
}
}
return nil

@@ -212,11 +212,8 @@ func getLabelInRepoByName(e Engine, repoID int64, labelName string) (*Label, err
return nil, ErrLabelNotExist{0, repoID}
}

l := &Label{
Name: labelName,
RepoID: repoID,
}
has, err := e.Get(l)
l := &Label{}
has, err := e.Where("name=? AND repo_id=?", labelName, repoID).Get(l)
if err != nil {
return nil, err
} else if !has {

@ -6,6 +6,7 @@ package models
|
|||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
api "code.gitea.io/gitea/modules/structs"
|
||||
|
@ -95,6 +96,8 @@ func NewMilestone(m *Milestone) (err error) {
|
|||
return err
|
||||
}
|
||||
|
||||
m.Name = strings.TrimSpace(m.Name)
|
||||
|
||||
if _, err = sess.Insert(m); err != nil {
|
||||
return err
|
||||
}
|
||||
|
@ -268,6 +271,7 @@ func GetMilestones(repoID int64, page int, isClosed bool, sortType string) (Mile
|
|||
}
|
||||
|
||||
func updateMilestone(e Engine, m *Milestone) error {
|
||||
m.Name = strings.TrimSpace(m.Name)
|
||||
_, err := e.ID(m.ID).AllCols().
|
||||
SetExpr("num_issues", builder.Select("count(*)").From("issue").Where(
|
||||
builder.Eq{"milestone_id": m.ID},
|
||||
|
@ -283,12 +287,33 @@ func updateMilestone(e Engine, m *Milestone) error {
|
|||
}
|
||||
|
||||
// UpdateMilestone updates information of given milestone.
|
||||
func UpdateMilestone(m *Milestone) error {
|
||||
if err := updateMilestone(x, m); err != nil {
|
||||
func UpdateMilestone(m *Milestone, oldIsClosed bool) error {
    sess := x.NewSession()
    defer sess.Close()
    if err := sess.Begin(); err != nil {
        return err
    }

    return updateMilestoneCompleteness(x, m.ID)
    if m.IsClosed && !oldIsClosed {
        m.ClosedDateUnix = timeutil.TimeStampNow()
    }

    if err := updateMilestone(sess, m); err != nil {
        return err
    }

    if err := updateMilestoneCompleteness(sess, m.ID); err != nil {
        return err
    }

    // if IsClosed changed, update milestone numbers of repository
    if oldIsClosed != m.IsClosed {
        if err := updateRepoMilestoneNum(sess, m.RepoID); err != nil {
            return err
        }
    }

    return sess.Commit()
}

func updateMilestoneCompleteness(e Engine, milestoneID int64) error {

@ -496,10 +521,12 @@ func DeleteMilestoneByRepoID(repoID, id int64) error {
    return sess.Commit()
}

// CountMilestonesByRepoIDs map from repoIDs to number of milestones matching the options`
func CountMilestonesByRepoIDs(repoIDs []int64, isClosed bool) (map[int64]int64, error) {
// CountMilestones map from repo conditions to number of milestones matching the options`
func CountMilestones(repoCond builder.Cond, isClosed bool) (map[int64]int64, error) {
    sess := x.Where("is_closed = ?", isClosed)
    sess.In("repo_id", repoIDs)
    if repoCond.IsValid() {
        sess.In("repo_id", builder.Select("id").From("repository").Where(repoCond))
    }

    countsSlice := make([]*struct {
        RepoID int64

@ -519,11 +546,21 @@ func CountMilestonesByRepoIDs(repoIDs []int64, isClosed bool) (map[int64]int64,
    return countMap, nil
}

// GetMilestonesByRepoIDs returns a list of milestones of given repositories and status.
func GetMilestonesByRepoIDs(repoIDs []int64, page int, isClosed bool, sortType string) (MilestoneList, error) {
// CountMilestonesByRepoIDs map from repoIDs to number of milestones matching the options`
func CountMilestonesByRepoIDs(repoIDs []int64, isClosed bool) (map[int64]int64, error) {
    return CountMilestones(
        builder.In("repo_id", repoIDs),
        isClosed,
    )
}

// SearchMilestones search milestones
func SearchMilestones(repoCond builder.Cond, page int, isClosed bool, sortType string) (MilestoneList, error) {
    miles := make([]*Milestone, 0, setting.UI.IssuePagingNum)
    sess := x.Where("is_closed = ?", isClosed)
    sess.In("repo_id", repoIDs)
    if repoCond.IsValid() {
        sess.In("repo_id", builder.Select("id").From("repository").Where(repoCond))
    }
    if page > 0 {
        sess = sess.Limit(setting.UI.IssuePagingNum, (page-1)*setting.UI.IssuePagingNum)
    }

@ -545,25 +582,45 @@ func GetMilestonesByRepoIDs(repoIDs []int64, page int, isClosed bool, sortType s
    return miles, sess.Find(&miles)
}

// GetMilestonesByRepoIDs returns a list of milestones of given repositories and status.
func GetMilestonesByRepoIDs(repoIDs []int64, page int, isClosed bool, sortType string) (MilestoneList, error) {
    return SearchMilestones(
        builder.In("repo_id", repoIDs),
        page,
        isClosed,
        sortType,
    )
}

// MilestonesStats represents milestone statistic information.
type MilestonesStats struct {
    OpenCount, ClosedCount int64
}

// Total returns the total counts of milestones
func (m MilestonesStats) Total() int64 {
    return m.OpenCount + m.ClosedCount
}

// GetMilestonesStats returns milestone statistic information for dashboard by given conditions.
func GetMilestonesStats(userRepoIDs []int64) (*MilestonesStats, error) {
func GetMilestonesStats(repoCond builder.Cond) (*MilestonesStats, error) {
    var err error
    stats := &MilestonesStats{}

    stats.OpenCount, err = x.Where("is_closed = ?", false).
        And(builder.In("repo_id", userRepoIDs)).
        Count(new(Milestone))
    sess := x.Where("is_closed = ?", false)
    if repoCond.IsValid() {
        sess.And(builder.In("repo_id", builder.Select("id").From("repository").Where(repoCond)))
    }
    stats.OpenCount, err = sess.Count(new(Milestone))
    if err != nil {
        return nil, err
    }
    stats.ClosedCount, err = x.Where("is_closed = ?", true).
        And(builder.In("repo_id", userRepoIDs)).
        Count(new(Milestone))

    sess = x.Where("is_closed = ?", true)
    if repoCond.IsValid() {
        sess.And(builder.In("repo_id", builder.Select("id").From("repository").Where(repoCond)))
    }
    stats.ClosedCount, err = sess.Count(new(Milestone))
    if err != nil {
        return nil, err
    }
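The helpers above replace ID-list parameters with an xorm builder condition. The snippet below is an illustrative sketch, not part of the change set: it assumes it lives in the same models package, the helper name and repository IDs are made up, and it only calls CountMilestones, SearchMilestones and GetMilestonesStats as declared above.

// Sketch only: exercises the condition-based milestone helpers added above.
func exampleMilestoneQueries() error {
    repoCond := builder.In("repo_id", []int64{1, 2}) // illustrative repository IDs

    // Open-milestone count per repository, restricted by the condition.
    counts, err := CountMilestones(repoCond, false)
    if err != nil {
        return err
    }

    // First page of open milestones, default sort order.
    miles, err := SearchMilestones(repoCond, 1, false, "")
    if err != nil {
        return err
    }

    // Dashboard-style open/closed totals for the same repositories.
    stats, err := GetMilestonesStats(repoCond)
    if err != nil {
        return err
    }

    _, _, _ = counts, miles, stats.Total()
    return nil
}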
@ -11,6 +11,7 @@ import (

    api "code.gitea.io/gitea/modules/structs"
    "code.gitea.io/gitea/modules/timeutil"
    "xorm.io/builder"

    "github.com/stretchr/testify/assert"
)

@ -158,10 +159,11 @@ func TestUpdateMilestone(t *testing.T) {
    assert.NoError(t, PrepareTestDatabase())

    milestone := AssertExistsAndLoadBean(t, &Milestone{ID: 1}).(*Milestone)
    milestone.Name = "newMilestoneName"
    milestone.Name = " newMilestoneName "
    milestone.Content = "newMilestoneContent"
    assert.NoError(t, UpdateMilestone(milestone))
    AssertExistsAndLoadBean(t, milestone)
    assert.NoError(t, UpdateMilestone(milestone, milestone.IsClosed))
    milestone = AssertExistsAndLoadBean(t, &Milestone{ID: 1}).(*Milestone)
    assert.EqualValues(t, "newMilestoneName", milestone.Name)
    CheckConsistencyFor(t, &Milestone{})
}

@ -369,7 +371,7 @@ func TestGetMilestonesStats(t *testing.T) {
    repo1 := AssertExistsAndLoadBean(t, &Repository{ID: 1}).(*Repository)
    repo2 := AssertExistsAndLoadBean(t, &Repository{ID: 2}).(*Repository)

    milestoneStats, err := GetMilestonesStats([]int64{repo1.ID, repo2.ID})
    milestoneStats, err := GetMilestonesStats(builder.In("repo_id", []int64{repo1.ID, repo2.ID}))
    assert.NoError(t, err)
    assert.EqualValues(t, repo1.NumOpenMilestones+repo2.NumOpenMilestones, milestoneStats.OpenCount)
    assert.EqualValues(t, repo1.NumClosedMilestones+repo2.NumClosedMilestones, milestoneStats.ClosedCount)
@ -273,6 +273,10 @@ func DeleteTime(t *TrackedTime) error {
        return err
    }

    if err := t.loadAttributes(sess); err != nil {
        return err
    }

    if err := deleteTime(sess, t); err != nil {
        return err
    }

@ -312,10 +316,8 @@ func deleteTime(e Engine, t *TrackedTime) error {

// GetTrackedTimeByID returns raw TrackedTime without loading attributes by id
func GetTrackedTimeByID(id int64) (*TrackedTime, error) {
    time := &TrackedTime{
        ID: id,
    }
    has, err := x.Get(time)
    time := new(TrackedTime)
    has, err := x.ID(id).Get(time)
    if err != nil {
        return nil, err
    } else if !has {
@ -64,14 +64,18 @@ func getIssueWatch(e Engine, userID, issueID int64) (iw *IssueWatch, exists bool
    return
}

// GetIssueWatchersIDs returns IDs of subscribers to a given issue id
// GetIssueWatchersIDs returns IDs of subscribers or explicit unsubscribers to a given issue id
// but avoids joining with `user` for performance reasons
// User permissions must be verified elsewhere if required
func GetIssueWatchersIDs(issueID int64) ([]int64, error) {
func GetIssueWatchersIDs(issueID int64, watching bool) ([]int64, error) {
    return getIssueWatchersIDs(x, issueID, watching)
}

func getIssueWatchersIDs(e Engine, issueID int64, watching bool) ([]int64, error) {
    ids := make([]int64, 0, 64)
    return ids, x.Table("issue_watch").
    return ids, e.Table("issue_watch").
        Where("issue_id=?", issueID).
        And("is_watching = ?", true).
        And("is_watching = ?", watching).
        Select("user_id").
        Find(&ids)
}
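An illustrative call site for the widened GetIssueWatchersIDs signature, sketched under the assumption that it runs inside the models package; the helper name and usage are made up, only the signature comes from the diff above.

// Sketch only: fetch explicit subscribers and explicit unsubscribers separately.
func exampleIssueWatcherIDs(issueID int64) error {
    subscriberIDs, err := GetIssueWatchersIDs(issueID, true) // rows with is_watching = true
    if err != nil {
        return err
    }
    unsubscriberIDs, err := GetIssueWatchersIDs(issueID, false) // rows with is_watching = false
    if err != nil {
        return err
    }
    _, _ = subscriberIDs, unsubscriberIDs
    return nil
}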
@ -12,7 +12,6 @@ import (
|
|||
"fmt"
|
||||
"net/smtp"
|
||||
"net/textproto"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"code.gitea.io/gitea/modules/auth/ldap"
|
||||
|
@ -301,7 +300,7 @@ func (source *LoginSource) SSPI() *SSPIConfig {
|
|||
// CreateLoginSource inserts a LoginSource in the DB if not already
|
||||
// existing with the given name.
|
||||
func CreateLoginSource(source *LoginSource) error {
|
||||
has, err := x.Get(&LoginSource{Name: source.Name})
|
||||
has, err := x.Where("name=?", source.Name).Exist(new(LoginSource))
|
||||
if err != nil {
|
||||
return err
|
||||
} else if has {
|
||||
|
@ -455,13 +454,9 @@ func composeFullName(firstname, surname, username string) string {
|
|||
}
|
||||
}
|
||||
|
||||
var (
|
||||
alphaDashDotPattern = regexp.MustCompile(`[^\w-\.]`)
|
||||
)
|
||||
|
||||
// LoginViaLDAP queries if login/password is valid against the LDAP directory pool,
|
||||
// and create a local user if success when enabled.
|
||||
func LoginViaLDAP(user *User, login, password string, source *LoginSource, autoRegister bool) (*User, error) {
|
||||
func LoginViaLDAP(user *User, login, password string, source *LoginSource) (*User, error) {
|
||||
sr := source.Cfg.(*LDAPConfig).SearchEntry(login, password, source.Type == LoginDLDAP)
|
||||
if sr == nil {
|
||||
// User not in LDAP, do nothing
|
||||
|
@ -473,17 +468,25 @@ func LoginViaLDAP(user *User, login, password string, source *LoginSource, autoR
|
|||
// Update User admin flag if exist
|
||||
if isExist, err := IsUserExist(0, sr.Username); err != nil {
|
||||
return nil, err
|
||||
} else if isExist &&
|
||||
!user.ProhibitLogin && len(source.LDAP().AdminFilter) > 0 && user.IsAdmin != sr.IsAdmin {
|
||||
// Change existing admin flag only if AdminFilter option is set
|
||||
user.IsAdmin = sr.IsAdmin
|
||||
err = UpdateUserCols(user, "is_admin")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
} else if isExist {
|
||||
if user == nil {
|
||||
user, err = GetUserByName(sr.Username)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
if user != nil &&
|
||||
!user.ProhibitLogin && len(source.LDAP().AdminFilter) > 0 && user.IsAdmin != sr.IsAdmin {
|
||||
// Change existing admin flag only if AdminFilter option is set
|
||||
user.IsAdmin = sr.IsAdmin
|
||||
err = UpdateUserCols(user, "is_admin")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !autoRegister {
|
||||
if user != nil {
|
||||
if isAttributeSSHPublicKeySet && synchronizeLdapSSHPublicKeys(user, source, sr.SSHPublicKey) {
|
||||
return user, RewriteAllPublicKeys()
|
||||
}
|
||||
|
@ -495,10 +498,6 @@ func LoginViaLDAP(user *User, login, password string, source *LoginSource, autoR
|
|||
if len(sr.Username) == 0 {
|
||||
sr.Username = login
|
||||
}
|
||||
// Validate username make sure it satisfies requirement.
|
||||
if alphaDashDotPattern.MatchString(sr.Username) {
|
||||
return nil, fmt.Errorf("Invalid pattern for attribute 'username' [%s]: must be valid alpha or numeric or dash(-_) or dot characters", sr.Username)
|
||||
}
|
||||
|
||||
if len(sr.Mail) == 0 {
|
||||
sr.Mail = fmt.Sprintf("%s@localhost", sr.Username)
|
||||
|
@ -594,7 +593,7 @@ func SMTPAuth(a smtp.Auth, cfg *SMTPConfig) error {
|
|||
|
||||
// LoginViaSMTP queries if login/password is valid against the SMTP,
|
||||
// and create a local user if success when enabled.
|
||||
func LoginViaSMTP(user *User, login, password string, sourceID int64, cfg *SMTPConfig, autoRegister bool) (*User, error) {
|
||||
func LoginViaSMTP(user *User, login, password string, sourceID int64, cfg *SMTPConfig) (*User, error) {
|
||||
// Verify allowed domains.
|
||||
if len(cfg.AllowedDomains) > 0 {
|
||||
idx := strings.Index(login, "@")
|
||||
|
@ -625,7 +624,7 @@ func LoginViaSMTP(user *User, login, password string, sourceID int64, cfg *SMTPC
|
|||
return nil, err
|
||||
}
|
||||
|
||||
if !autoRegister {
|
||||
if user != nil {
|
||||
return user, nil
|
||||
}
|
||||
|
||||
|
@ -657,33 +656,41 @@ func LoginViaSMTP(user *User, login, password string, sourceID int64, cfg *SMTPC
|
|||
|
||||
// LoginViaPAM queries if login/password is valid against the PAM,
|
||||
// and create a local user if success when enabled.
|
||||
func LoginViaPAM(user *User, login, password string, sourceID int64, cfg *PAMConfig, autoRegister bool) (*User, error) {
|
||||
if err := pam.Auth(cfg.ServiceName, login, password); err != nil {
|
||||
func LoginViaPAM(user *User, login, password string, sourceID int64, cfg *PAMConfig) (*User, error) {
|
||||
pamLogin, err := pam.Auth(cfg.ServiceName, login, password)
|
||||
if err != nil {
|
||||
if strings.Contains(err.Error(), "Authentication failure") {
|
||||
return nil, ErrUserNotExist{0, login, 0}
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if !autoRegister {
|
||||
if user != nil {
|
||||
return user, nil
|
||||
}
|
||||
|
||||
// Allow PAM sources with `@` in their name, like from Active Directory
|
||||
username := pamLogin
|
||||
idx := strings.Index(pamLogin, "@")
|
||||
if idx > -1 {
|
||||
username = pamLogin[:idx]
|
||||
}
|
||||
|
||||
user = &User{
|
||||
LowerName: strings.ToLower(login),
|
||||
Name: login,
|
||||
Email: login,
|
||||
LowerName: strings.ToLower(username),
|
||||
Name: username,
|
||||
Email: pamLogin,
|
||||
Passwd: password,
|
||||
LoginType: LoginPAM,
|
||||
LoginSource: sourceID,
|
||||
LoginName: login,
|
||||
LoginName: login, // This is what the user typed in
|
||||
IsActive: true,
|
||||
}
|
||||
return user, CreateUser(user)
|
||||
}
|
||||
|
||||
// ExternalUserLogin attempts a login using external source types.
|
||||
func ExternalUserLogin(user *User, login, password string, source *LoginSource, autoRegister bool) (*User, error) {
|
||||
func ExternalUserLogin(user *User, login, password string, source *LoginSource) (*User, error) {
|
||||
if !source.IsActived {
|
||||
return nil, ErrLoginSourceNotActived
|
||||
}
|
||||
|
@ -691,11 +698,11 @@ func ExternalUserLogin(user *User, login, password string, source *LoginSource,
|
|||
var err error
|
||||
switch source.Type {
|
||||
case LoginLDAP, LoginDLDAP:
|
||||
user, err = LoginViaLDAP(user, login, password, source, autoRegister)
|
||||
user, err = LoginViaLDAP(user, login, password, source)
|
||||
case LoginSMTP:
|
||||
user, err = LoginViaSMTP(user, login, password, source.ID, source.Cfg.(*SMTPConfig), autoRegister)
|
||||
user, err = LoginViaSMTP(user, login, password, source.ID, source.Cfg.(*SMTPConfig))
|
||||
case LoginPAM:
|
||||
user, err = LoginViaPAM(user, login, password, source.ID, source.Cfg.(*PAMConfig), autoRegister)
|
||||
user, err = LoginViaPAM(user, login, password, source.ID, source.Cfg.(*PAMConfig))
|
||||
default:
|
||||
return nil, ErrUnsupportedLoginType
|
||||
}
|
||||
|
@ -775,7 +782,7 @@ func UserSignIn(username, password string) (*User, error) {
|
|||
return nil, ErrLoginSourceNotExist{user.LoginSource}
|
||||
}
|
||||
|
||||
return ExternalUserLogin(user, user.LoginName, password, &source, false)
|
||||
return ExternalUserLogin(user, user.LoginName, password, &source)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -789,7 +796,7 @@ func UserSignIn(username, password string) (*User, error) {
|
|||
// don't try to authenticate against OAuth2 and SSPI sources here
|
||||
continue
|
||||
}
|
||||
authUser, err := ExternalUserLogin(nil, username, password, source, true)
|
||||
authUser, err := ExternalUserLogin(nil, username, password, source)
|
||||
if err == nil {
|
||||
return authUser, nil
|
||||
}
|
||||
|
|
|
@ -292,6 +292,52 @@ var migrations = []Migration{
    NewMigration("Add block on rejected reviews branch protection", addBlockOnRejectedReviews),
}

// GetCurrentDBVersion returns the current db version
func GetCurrentDBVersion(x *xorm.Engine) (int64, error) {
    if err := x.Sync(new(Version)); err != nil {
        return -1, fmt.Errorf("sync: %v", err)
    }

    currentVersion := &Version{ID: 1}
    has, err := x.Get(currentVersion)
    if err != nil {
        return -1, fmt.Errorf("get: %v", err)
    }
    if !has {
        return -1, nil
    }
    return currentVersion.Version, nil
}

// ExpectedVersion returns the expected db version
func ExpectedVersion() int64 {
    return int64(minDBVersion + len(migrations))
}

// EnsureUpToDate will check if the db is at the correct version
func EnsureUpToDate(x *xorm.Engine) error {
    currentDB, err := GetCurrentDBVersion(x)
    if err != nil {
        return err
    }

    if currentDB < 0 {
        return fmt.Errorf("Database has not been initialised")
    }

    if minDBVersion > currentDB {
        return fmt.Errorf("DB version %d (<= %d) is too old for auto-migration. Upgrade to Gitea 1.6.4 first then upgrade to this version", currentDB, minDBVersion)
    }

    expected := ExpectedVersion()

    if currentDB != expected {
        return fmt.Errorf(`Current database version %d is not equal to the expected version %d. Please run "gitea [--config /path/to/app.ini] migrate" to update the database version`, currentDB, expected)
    }

    return nil
}

// Migrate database to current version
func Migrate(x *xorm.Engine) error {
    if err := x.Sync(new(Version)); err != nil {
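The new version helpers compose naturally into a startup check. The function below is a hedged sketch, not code from this branch: it assumes it sits alongside the migrations above and only uses GetCurrentDBVersion, ExpectedVersion and EnsureUpToDate as declared there; the function name is hypothetical.

// Sketch only: report the schema version and refuse to continue if it is not current.
func exampleCheckDBVersion(x *xorm.Engine) error {
    current, err := GetCurrentDBVersion(x)
    if err != nil {
        return err
    }
    fmt.Printf("current schema version: %d, expected: %d\n", current, ExpectedVersion())

    // EnsureUpToDate errors when the schema is uninitialised, too old for
    // auto-migration, or simply not at the expected version.
    return EnsureUpToDate(x)
}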
@ -26,23 +26,38 @@ func deleteOrphanedAttachments(x *xorm.Engine) error {
|
|||
sess := x.NewSession()
|
||||
defer sess.Close()
|
||||
|
||||
err := sess.BufferSize(setting.Database.IterateBufferSize).
|
||||
Where("`issue_id` = 0 and (`release_id` = 0 or `release_id` not in (select `id` from `release`))").Cols("uuid").
|
||||
Iterate(new(Attachment),
|
||||
func(idx int, bean interface{}) error {
|
||||
attachment := bean.(*Attachment)
|
||||
|
||||
if err := os.RemoveAll(models.AttachmentLocalPath(attachment.UUID)); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err := sess.ID(attachment.ID).NoAutoCondition().Delete(attachment)
|
||||
return err
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
var limit = setting.Database.IterateBufferSize
|
||||
if limit <= 0 {
|
||||
limit = 50
|
||||
}
|
||||
|
||||
return sess.Commit()
|
||||
for {
|
||||
attachements := make([]Attachment, 0, limit)
|
||||
if err := sess.Where("`issue_id` = 0 and (`release_id` = 0 or `release_id` not in (select `id` from `release`))").
|
||||
Cols("id, uuid").Limit(limit).
|
||||
Asc("id").
|
||||
Find(&attachements); err != nil {
|
||||
return err
|
||||
}
|
||||
if len(attachements) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
var ids = make([]int64, 0, limit)
|
||||
for _, attachment := range attachements {
|
||||
ids = append(ids, attachment.ID)
|
||||
}
|
||||
if _, err := sess.In("id", ids).Delete(new(Attachment)); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, attachment := range attachements {
|
||||
if err := os.RemoveAll(models.AttachmentLocalPath(attachment.UUID)); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
if len(attachements) < limit {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -46,6 +46,12 @@ type Engine interface {
    Asc(colNames ...string) *xorm.Session
}

const (
    // When queries are broken down in parts because of the number
    // of parameters, attempt to break by this amount
    maxQueryParameters = 300
)

var (
    x      *xorm.Engine
    tables []interface{}
@ -7,6 +7,7 @@ package models
|
|||
import (
|
||||
"fmt"
|
||||
|
||||
"code.gitea.io/gitea/modules/log"
|
||||
"code.gitea.io/gitea/modules/timeutil"
|
||||
)
|
||||
|
||||
|
@ -281,9 +282,9 @@ func (nl NotificationList) getPendingRepoIDs() []int64 {
|
|||
}
|
||||
|
||||
// LoadRepos loads repositories from database
|
||||
func (nl NotificationList) LoadRepos() (RepositoryList, error) {
|
||||
func (nl NotificationList) LoadRepos() (RepositoryList, []int, error) {
|
||||
if len(nl) == 0 {
|
||||
return RepositoryList{}, nil
|
||||
return RepositoryList{}, []int{}, nil
|
||||
}
|
||||
|
||||
var repoIDs = nl.getPendingRepoIDs()
|
||||
|
@ -298,7 +299,7 @@ func (nl NotificationList) LoadRepos() (RepositoryList, error) {
|
|||
In("id", repoIDs[:limit]).
|
||||
Rows(new(Repository))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
for rows.Next() {
|
||||
|
@ -306,7 +307,7 @@ func (nl NotificationList) LoadRepos() (RepositoryList, error) {
|
|||
err = rows.Scan(&repo)
|
||||
if err != nil {
|
||||
rows.Close()
|
||||
return nil, err
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
repos[repo.ID] = &repo
|
||||
|
@ -317,14 +318,21 @@ func (nl NotificationList) LoadRepos() (RepositoryList, error) {
|
|||
repoIDs = repoIDs[limit:]
|
||||
}
|
||||
|
||||
failed := []int{}
|
||||
|
||||
var reposList = make(RepositoryList, 0, len(repoIDs))
|
||||
for _, notification := range nl {
|
||||
for i, notification := range nl {
|
||||
if notification.Repository == nil {
|
||||
notification.Repository = repos[notification.RepoID]
|
||||
}
|
||||
if notification.Repository == nil {
|
||||
log.Error("Notification[%d]: RepoID: %d not found", notification.ID, notification.RepoID)
|
||||
failed = append(failed, i)
|
||||
continue
|
||||
}
|
||||
var found bool
|
||||
for _, r := range reposList {
|
||||
if r.ID == notification.Repository.ID {
|
||||
if r.ID == notification.RepoID {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
|
@ -333,7 +341,7 @@ func (nl NotificationList) LoadRepos() (RepositoryList, error) {
|
|||
reposList = append(reposList, notification.Repository)
|
||||
}
|
||||
}
|
||||
return reposList, nil
|
||||
return reposList, failed, nil
|
||||
}
|
||||
|
||||
func (nl NotificationList) getPendingIssueIDs() []int64 {
|
||||
|
@ -350,9 +358,9 @@ func (nl NotificationList) getPendingIssueIDs() []int64 {
|
|||
}
|
||||
|
||||
// LoadIssues loads issues from database
|
||||
func (nl NotificationList) LoadIssues() error {
|
||||
func (nl NotificationList) LoadIssues() ([]int, error) {
|
||||
if len(nl) == 0 {
|
||||
return nil
|
||||
return []int{}, nil
|
||||
}
|
||||
|
||||
var issueIDs = nl.getPendingIssueIDs()
|
||||
|
@ -367,7 +375,7 @@ func (nl NotificationList) LoadIssues() error {
|
|||
In("id", issueIDs[:limit]).
|
||||
Rows(new(Issue))
|
||||
if err != nil {
|
||||
return err
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for rows.Next() {
|
||||
|
@ -375,7 +383,7 @@ func (nl NotificationList) LoadIssues() error {
|
|||
err = rows.Scan(&issue)
|
||||
if err != nil {
|
||||
rows.Close()
|
||||
return err
|
||||
return nil, err
|
||||
}
|
||||
|
||||
issues[issue.ID] = &issue
|
||||
|
@ -386,13 +394,38 @@ func (nl NotificationList) LoadIssues() error {
|
|||
issueIDs = issueIDs[limit:]
|
||||
}
|
||||
|
||||
for _, notification := range nl {
|
||||
failures := []int{}
|
||||
|
||||
for i, notification := range nl {
|
||||
if notification.Issue == nil {
|
||||
notification.Issue = issues[notification.IssueID]
|
||||
if notification.Issue == nil {
|
||||
log.Error("Notification[%d]: IssueID: %d Not Found", notification.ID, notification.IssueID)
|
||||
failures = append(failures, i)
|
||||
continue
|
||||
}
|
||||
notification.Issue.Repo = notification.Repository
|
||||
}
|
||||
}
|
||||
return nil
|
||||
return failures, nil
|
||||
}
|
||||
|
||||
// Without returns the notification list without the failures
|
||||
func (nl NotificationList) Without(failures []int) NotificationList {
|
||||
if len(failures) == 0 {
|
||||
return nl
|
||||
}
|
||||
remaining := make([]*Notification, 0, len(nl))
|
||||
last := -1
|
||||
var i int
|
||||
for _, i = range failures {
|
||||
remaining = append(remaining, nl[last+1:i]...)
|
||||
last = i
|
||||
}
|
||||
if len(nl) > i {
|
||||
remaining = append(remaining, nl[i+1:]...)
|
||||
}
|
||||
return remaining
|
||||
}
|
||||
|
||||
func (nl NotificationList) getPendingCommentIDs() []int64 {
|
||||
|
@ -409,9 +442,9 @@ func (nl NotificationList) getPendingCommentIDs() []int64 {
|
|||
}
|
||||
|
||||
// LoadComments loads comments from database
|
||||
func (nl NotificationList) LoadComments() error {
|
||||
func (nl NotificationList) LoadComments() ([]int, error) {
|
||||
if len(nl) == 0 {
|
||||
return nil
|
||||
return []int{}, nil
|
||||
}
|
||||
|
||||
var commentIDs = nl.getPendingCommentIDs()
|
||||
|
@ -426,7 +459,7 @@ func (nl NotificationList) LoadComments() error {
|
|||
In("id", commentIDs[:limit]).
|
||||
Rows(new(Comment))
|
||||
if err != nil {
|
||||
return err
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for rows.Next() {
|
||||
|
@ -434,7 +467,7 @@ func (nl NotificationList) LoadComments() error {
|
|||
err = rows.Scan(&comment)
|
||||
if err != nil {
|
||||
rows.Close()
|
||||
return err
|
||||
return nil, err
|
||||
}
|
||||
|
||||
comments[comment.ID] = &comment
|
||||
|
@ -445,13 +478,19 @@ func (nl NotificationList) LoadComments() error {
|
|||
commentIDs = commentIDs[limit:]
|
||||
}
|
||||
|
||||
for _, notification := range nl {
|
||||
failures := []int{}
|
||||
for i, notification := range nl {
|
||||
if notification.CommentID > 0 && notification.Comment == nil && comments[notification.CommentID] != nil {
|
||||
notification.Comment = comments[notification.CommentID]
|
||||
if notification.Comment == nil {
|
||||
log.Error("Notification[%d]: CommentID[%d] failed to load", notification.ID, notification.CommentID)
|
||||
failures = append(failures, i)
|
||||
continue
|
||||
}
|
||||
notification.Comment.Issue = notification.Issue
|
||||
}
|
||||
}
|
||||
return nil
|
||||
return failures, nil
|
||||
}
|
||||
|
||||
// GetNotificationCount returns the notification count for user
|
||||
|
|
|
@ -470,12 +470,12 @@ func GetOwnedOrgsByUserIDDesc(userID int64, desc string) ([]*User, error) {
func GetOrgsCanCreateRepoByUserID(userID int64) ([]*User, error) {
    orgs := make([]*User, 0, 10)

    return orgs, x.Join("INNER", "`team_user`", "`team_user`.org_id=`user`.id").
        Join("INNER", "`team`", "`team`.id=`team_user`.team_id").
        Where("`team_user`.uid=?", userID).
        And(builder.Eq{"`team`.authorize": AccessModeOwner}.Or(builder.Eq{"`team`.can_create_org_repo": true})).
        Desc("`user`.updated_unix").
        Find(&orgs)
    return orgs, x.Where(builder.In("id", builder.Select("`user`.id").From("`user`").
        Join("INNER", "`team_user`", "`team_user`.org_id = `user`.id").
        Join("INNER", "`team`", "`team`.id = `team_user`.team_id").
        Where(builder.Eq{"`team_user`.uid": userID}).
        And(builder.Eq{"`team`.authorize": AccessModeOwner}.Or(builder.Eq{"`team`.can_create_org_repo": true})))).
        Desc("`user`.updated_unix").Find(&orgs)
}

// GetOrgUsersByUserID returns all organization-user relations by user ID.
@ -399,7 +399,7 @@ func TestIncludesAllRepositoriesTeams(t *testing.T) {

    // Create org.
    org := &User{
        Name: "All repo",
        Name: "All_repo",
        IsActive: true,
        Type: UserTypeOrganization,
        Visibility: structs.VisibleTypePublic,
models/pull.go
|
@ -35,6 +35,7 @@ const (
|
|||
PullRequestStatusChecking
|
||||
PullRequestStatusMergeable
|
||||
PullRequestStatusManuallyMerged
|
||||
PullRequestStatusError
|
||||
)
|
||||
|
||||
// PullRequest represents relation between pull request and repositories.
|
||||
|
@ -67,7 +68,11 @@ type PullRequest struct {
|
|||
// MustHeadUserName returns the HeadRepo's username if failed return blank
|
||||
func (pr *PullRequest) MustHeadUserName() string {
|
||||
if err := pr.LoadHeadRepo(); err != nil {
|
||||
log.Error("LoadHeadRepo: %v", err)
|
||||
if !IsErrRepoNotExist(err) {
|
||||
log.Error("LoadHeadRepo: %v", err)
|
||||
} else {
|
||||
log.Warn("LoadHeadRepo %d but repository does not exist: %v", pr.HeadRepoID, err)
|
||||
}
|
||||
return ""
|
||||
}
|
||||
return pr.HeadRepo.MustOwnerName()
|
||||
|
@ -175,7 +180,16 @@ func (pr *PullRequest) GetDefaultMergeMessage() string {
|
|||
return ""
|
||||
}
|
||||
}
|
||||
return fmt.Sprintf("Merge branch '%s' of %s/%s into %s", pr.HeadBranch, pr.MustHeadUserName(), pr.HeadRepo.Name, pr.BaseBranch)
|
||||
if err := pr.LoadIssue(); err != nil {
|
||||
log.Error("Cannot load issue %d for PR id %d: Error: %v", pr.IssueID, pr.ID, err)
|
||||
return ""
|
||||
}
|
||||
|
||||
if pr.BaseRepoID == pr.HeadRepoID {
|
||||
return fmt.Sprintf("Merge pull request '%s' (#%d) from %s into %s", pr.Issue.Title, pr.Issue.Index, pr.HeadBranch, pr.BaseBranch)
|
||||
}
|
||||
|
||||
return fmt.Sprintf("Merge pull request '%s' (#%d) from %s:%s into %s", pr.Issue.Title, pr.Issue.Index, pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseBranch)
|
||||
}
|
||||
|
||||
// GetCommitMessages returns the commit messages between head and merge base (if there is one)
|
||||
|
@ -384,6 +398,13 @@ func (pr *PullRequest) GetDefaultSquashMessage() string {
|
|||
log.Error("LoadIssue: %v", err)
|
||||
return ""
|
||||
}
|
||||
if err := pr.LoadBaseRepo(); err != nil {
|
||||
log.Error("LoadBaseRepo: %v", err)
|
||||
return ""
|
||||
}
|
||||
if pr.BaseRepo.UnitEnabled(UnitTypeExternalTracker) {
|
||||
return fmt.Sprintf("%s (!%d)", pr.Issue.Title, pr.Issue.Index)
|
||||
}
|
||||
return fmt.Sprintf("%s (#%d)", pr.Issue.Title, pr.Issue.Index)
|
||||
}
|
||||
|
||||
|
@ -408,7 +429,7 @@ func (pr *PullRequest) apiFormat(e Engine) *api.PullRequest {
|
|||
err error
|
||||
)
|
||||
if err = pr.Issue.loadRepo(e); err != nil {
|
||||
log.Error("loadRepo[%d]: %v", pr.ID, err)
|
||||
log.Error("pr.Issue.loadRepo[%d]: %v", pr.ID, err)
|
||||
return nil
|
||||
}
|
||||
apiIssue := pr.Issue.apiFormat(e)
|
||||
|
@ -419,19 +440,14 @@ func (pr *PullRequest) apiFormat(e Engine) *api.PullRequest {
|
|||
return nil
|
||||
}
|
||||
}
|
||||
if pr.HeadRepo == nil {
|
||||
if pr.HeadRepoID != 0 && pr.HeadRepo == nil {
|
||||
pr.HeadRepo, err = getRepositoryByID(e, pr.HeadRepoID)
|
||||
if err != nil {
|
||||
if err != nil && !IsErrRepoNotExist(err) {
|
||||
log.Error("GetRepositoryById[%d]: %v", pr.ID, err)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
if err = pr.Issue.loadRepo(e); err != nil {
|
||||
log.Error("pr.Issue.loadRepo[%d]: %v", pr.ID, err)
|
||||
return nil
|
||||
}
|
||||
|
||||
apiPullRequest := &api.PullRequest{
|
||||
ID: pr.ID,
|
||||
URL: pr.Issue.HTMLURL(),
|
||||
|
@ -483,37 +499,45 @@ func (pr *PullRequest) apiFormat(e Engine) *api.PullRequest {
|
|||
apiPullRequest.Base = apiBaseBranchInfo
|
||||
}
|
||||
|
||||
headBranch, err = pr.HeadRepo.GetBranch(pr.HeadBranch)
|
||||
if err != nil {
|
||||
if git.IsErrBranchNotExist(err) {
|
||||
apiPullRequest.Head = nil
|
||||
} else {
|
||||
log.Error("GetBranch[%s]: %v", pr.HeadBranch, err)
|
||||
return nil
|
||||
}
|
||||
} else {
|
||||
apiHeadBranchInfo := &api.PRBranchInfo{
|
||||
Name: pr.HeadBranch,
|
||||
Ref: pr.HeadBranch,
|
||||
RepoID: pr.HeadRepoID,
|
||||
Repository: pr.HeadRepo.innerAPIFormat(e, AccessModeNone, false),
|
||||
}
|
||||
headCommit, err = headBranch.GetCommit()
|
||||
if pr.HeadRepo != nil {
|
||||
headBranch, err = pr.HeadRepo.GetBranch(pr.HeadBranch)
|
||||
if err != nil {
|
||||
if git.IsErrNotExist(err) {
|
||||
apiHeadBranchInfo.Sha = ""
|
||||
if git.IsErrBranchNotExist(err) {
|
||||
apiPullRequest.Head = nil
|
||||
} else {
|
||||
log.Error("GetCommit[%s]: %v", headBranch.Name, err)
|
||||
log.Error("GetBranch[%s]: %v", pr.HeadBranch, err)
|
||||
return nil
|
||||
}
|
||||
} else {
|
||||
apiHeadBranchInfo.Sha = headCommit.ID.String()
|
||||
apiHeadBranchInfo := &api.PRBranchInfo{
|
||||
Name: pr.HeadBranch,
|
||||
Ref: pr.HeadBranch,
|
||||
RepoID: pr.HeadRepoID,
|
||||
Repository: pr.HeadRepo.innerAPIFormat(e, AccessModeNone, false),
|
||||
}
|
||||
headCommit, err = headBranch.GetCommit()
|
||||
if err != nil {
|
||||
if git.IsErrNotExist(err) {
|
||||
apiHeadBranchInfo.Sha = ""
|
||||
} else {
|
||||
log.Error("GetCommit[%s]: %v", headBranch.Name, err)
|
||||
return nil
|
||||
}
|
||||
} else {
|
||||
apiHeadBranchInfo.Sha = headCommit.ID.String()
|
||||
}
|
||||
apiPullRequest.Head = apiHeadBranchInfo
|
||||
}
|
||||
} else {
|
||||
apiPullRequest.Head = &api.PRBranchInfo{
|
||||
Name: pr.HeadBranch,
|
||||
Ref: fmt.Sprintf("refs/pull/%d/head", pr.Index),
|
||||
RepoID: -1,
|
||||
}
|
||||
apiPullRequest.Head = apiHeadBranchInfo
|
||||
}
|
||||
|
||||
if pr.Status != PullRequestStatusChecking {
|
||||
mergeable := pr.Status != PullRequestStatusConflict && !pr.IsWorkInProgress()
|
||||
mergeable := !(pr.Status == PullRequestStatusConflict || pr.Status == PullRequestStatusError) && !pr.IsWorkInProgress()
|
||||
apiPullRequest.Mergeable = mergeable
|
||||
}
|
||||
if pr.HasMerged {
|
||||
|
@ -634,44 +658,66 @@ func (pr *PullRequest) CheckUserAllowedToMerge(doer *User) (err error) {
|
|||
}
|
||||
|
||||
// SetMerged sets a pull request to merged and closes the corresponding issue
|
||||
func (pr *PullRequest) SetMerged() (err error) {
|
||||
func (pr *PullRequest) SetMerged() (bool, error) {
|
||||
if pr.HasMerged {
|
||||
return fmt.Errorf("PullRequest[%d] already merged", pr.Index)
|
||||
return false, fmt.Errorf("PullRequest[%d] already merged", pr.Index)
|
||||
}
|
||||
if pr.MergedCommitID == "" || pr.MergedUnix == 0 || pr.Merger == nil {
|
||||
return fmt.Errorf("Unable to merge PullRequest[%d], some required fields are empty", pr.Index)
|
||||
return false, fmt.Errorf("Unable to merge PullRequest[%d], some required fields are empty", pr.Index)
|
||||
}
|
||||
|
||||
pr.HasMerged = true
|
||||
|
||||
sess := x.NewSession()
|
||||
defer sess.Close()
|
||||
if err = sess.Begin(); err != nil {
|
||||
return err
|
||||
if err := sess.Begin(); err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
if err = pr.loadIssue(sess); err != nil {
|
||||
return err
|
||||
if _, err := sess.Exec("UPDATE `issue` SET `repo_id` = `repo_id` WHERE `id` = ?", pr.IssueID); err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
if err = pr.Issue.loadRepo(sess); err != nil {
|
||||
return err
|
||||
}
|
||||
if err = pr.Issue.Repo.getOwner(sess); err != nil {
|
||||
return err
|
||||
if _, err := sess.Exec("UPDATE `pull_request` SET `issue_id` = `issue_id` WHERE `id` = ?", pr.ID); err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
if _, err = pr.Issue.changeStatus(sess, pr.Merger, true); err != nil {
|
||||
return fmt.Errorf("Issue.changeStatus: %v", err)
|
||||
}
|
||||
if _, err = sess.ID(pr.ID).Cols("has_merged, status, merged_commit_id, merger_id, merged_unix").Update(pr); err != nil {
|
||||
return fmt.Errorf("update pull request: %v", err)
|
||||
pr.Issue = nil
|
||||
if err := pr.loadIssue(sess); err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
if err = sess.Commit(); err != nil {
|
||||
return fmt.Errorf("Commit: %v", err)
|
||||
if tmpPr, err := getPullRequestByID(sess, pr.ID); err != nil {
|
||||
return false, err
|
||||
} else if tmpPr.HasMerged {
|
||||
if pr.Issue.IsClosed {
|
||||
return false, nil
|
||||
}
|
||||
return false, fmt.Errorf("PullRequest[%d] already merged but it's associated issue [%d] is not closed", pr.Index, pr.IssueID)
|
||||
} else if pr.Issue.IsClosed {
|
||||
return false, fmt.Errorf("PullRequest[%d] already closed", pr.Index)
|
||||
}
|
||||
return nil
|
||||
|
||||
if err := pr.Issue.loadRepo(sess); err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
if err := pr.Issue.Repo.getOwner(sess); err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
if _, err := pr.Issue.changeStatus(sess, pr.Merger, true); err != nil {
|
||||
return false, fmt.Errorf("Issue.changeStatus: %v", err)
|
||||
}
|
||||
|
||||
if _, err := sess.Where("id = ?", pr.ID).Cols("has_merged, status, merged_commit_id, merger_id, merged_unix").Update(pr); err != nil {
|
||||
return false, fmt.Errorf("Failed to update pr[%d]: %v", pr.ID, err)
|
||||
}
|
||||
|
||||
if err := sess.Commit(); err != nil {
|
||||
return false, fmt.Errorf("Commit: %v", err)
|
||||
}
|
||||
return true, nil
|
||||
}
|
||||
|
||||
// NewPullRequest creates new pull request with labels for repository.
|
||||
|
@ -830,6 +876,12 @@ func (pr *PullRequest) UpdateCols(cols ...string) error {
|
|||
return err
|
||||
}
|
||||
|
||||
// UpdateColsIfNotMerged updates specific fields of a pull request if it has not been merged
|
||||
func (pr *PullRequest) UpdateColsIfNotMerged(cols ...string) error {
|
||||
_, err := x.Where("id = ? AND has_merged = ?", pr.ID, false).Cols(cols...).Update(pr)
|
||||
return err
|
||||
}
|
||||
|
||||
// IsWorkInProgress determine if the Pull Request is a Work In Progress by its title
|
||||
func (pr *PullRequest) IsWorkInProgress() bool {
|
||||
if err := pr.LoadIssue(); err != nil {
|
||||
|
|
models/repo.go
|
@ -173,7 +173,6 @@ type Repository struct {
|
|||
NumMilestones int `xorm:"NOT NULL DEFAULT 0"`
|
||||
NumClosedMilestones int `xorm:"NOT NULL DEFAULT 0"`
|
||||
NumOpenMilestones int `xorm:"-"`
|
||||
NumReleases int `xorm:"-"`
|
||||
|
||||
IsPrivate bool `xorm:"INDEX"`
|
||||
IsEmpty bool `xorm:"INDEX"`
|
||||
|
@ -204,6 +203,14 @@ type Repository struct {
|
|||
UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"`
|
||||
}
|
||||
|
||||
// SanitizedOriginalURL returns a sanitized OriginalURL
|
||||
func (repo *Repository) SanitizedOriginalURL() string {
|
||||
if repo.OriginalURL == "" {
|
||||
return ""
|
||||
}
|
||||
return util.SanitizeURLCredentials(repo.OriginalURL, false)
|
||||
}
|
||||
|
||||
// ColorFormat returns a colored string to represent this repo
|
||||
func (repo *Repository) ColorFormat(s fmt.State) {
|
||||
var ownerName interface{}
|
||||
|
@ -356,6 +363,8 @@ func (repo *Repository) innerAPIFormat(e Engine, mode AccessMode, isParent bool)
|
|||
allowSquash = config.AllowSquash
|
||||
}
|
||||
|
||||
numReleases, _ := GetReleaseCountByRepoID(repo.ID, FindReleasesOptions{IncludeDrafts: false, IncludeTags: true})
|
||||
|
||||
return &api.Repository{
|
||||
ID: repo.ID,
|
||||
Owner: repo.Owner.APIFormat(),
|
||||
|
@ -379,7 +388,7 @@ func (repo *Repository) innerAPIFormat(e Engine, mode AccessMode, isParent bool)
|
|||
Watchers: repo.NumWatches,
|
||||
OpenIssues: repo.NumOpenIssues,
|
||||
OpenPulls: repo.NumOpenPulls,
|
||||
Releases: repo.NumReleases,
|
||||
Releases: int(numReleases),
|
||||
DefaultBranch: repo.DefaultBranch,
|
||||
Created: repo.CreatedUnix.AsTime(),
|
||||
Updated: repo.UpdatedUnix.AsTime(),
|
||||
|
@ -960,6 +969,21 @@ func CheckCreateRepository(doer, u *User, name string) error {
|
|||
return nil
|
||||
}
|
||||
|
||||
func getHookTemplates() (hookNames, hookTpls, giteaHookTpls []string) {
|
||||
hookNames = []string{"pre-receive", "update", "post-receive"}
|
||||
hookTpls = []string{
|
||||
fmt.Sprintf("#!/usr/bin/env %s\ndata=$(cat)\nexitcodes=\"\"\nhookname=$(basename $0)\nGIT_DIR=${GIT_DIR:-$(dirname $0)}\n\nfor hook in ${GIT_DIR}/hooks/${hookname}.d/*; do\ntest -x \"${hook}\" && test -f \"${hook}\" || continue\necho \"${data}\" | \"${hook}\"\nexitcodes=\"${exitcodes} $?\"\ndone\n\nfor i in ${exitcodes}; do\n[ ${i} -eq 0 ] || exit ${i}\ndone\n", setting.ScriptType),
|
||||
fmt.Sprintf("#!/usr/bin/env %s\nexitcodes=\"\"\nhookname=$(basename $0)\nGIT_DIR=${GIT_DIR:-$(dirname $0)}\n\nfor hook in ${GIT_DIR}/hooks/${hookname}.d/*; do\ntest -x \"${hook}\" && test -f \"${hook}\" || continue\n\"${hook}\" $1 $2 $3\nexitcodes=\"${exitcodes} $?\"\ndone\n\nfor i in ${exitcodes}; do\n[ ${i} -eq 0 ] || exit ${i}\ndone\n", setting.ScriptType),
|
||||
fmt.Sprintf("#!/usr/bin/env %s\ndata=$(cat)\nexitcodes=\"\"\nhookname=$(basename $0)\nGIT_DIR=${GIT_DIR:-$(dirname $0)}\n\nfor hook in ${GIT_DIR}/hooks/${hookname}.d/*; do\ntest -x \"${hook}\" && test -f \"${hook}\" || continue\necho \"${data}\" | \"${hook}\"\nexitcodes=\"${exitcodes} $?\"\ndone\n\nfor i in ${exitcodes}; do\n[ ${i} -eq 0 ] || exit ${i}\ndone\n", setting.ScriptType),
|
||||
}
|
||||
giteaHookTpls = []string{
|
||||
fmt.Sprintf("#!/usr/bin/env %s\n\"%s\" hook --config='%s' pre-receive\n", setting.ScriptType, setting.AppPath, setting.CustomConf),
|
||||
fmt.Sprintf("#!/usr/bin/env %s\n\"%s\" hook --config='%s' update $1 $2 $3\n", setting.ScriptType, setting.AppPath, setting.CustomConf),
|
||||
fmt.Sprintf("#!/usr/bin/env %s\n\"%s\" hook --config='%s' post-receive\n", setting.ScriptType, setting.AppPath, setting.CustomConf),
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// CreateDelegateHooks creates all the hooks scripts for the repo
|
||||
func CreateDelegateHooks(repoPath string) error {
|
||||
return createDelegateHooks(repoPath)
|
||||
|
@ -967,21 +991,7 @@ func CreateDelegateHooks(repoPath string) error {
|
|||
|
||||
// createDelegateHooks creates all the hooks scripts for the repo
|
||||
func createDelegateHooks(repoPath string) (err error) {
|
||||
|
||||
var (
|
||||
hookNames = []string{"pre-receive", "update", "post-receive"}
|
||||
hookTpls = []string{
|
||||
fmt.Sprintf("#!/usr/bin/env %s\ndata=$(cat)\nexitcodes=\"\"\nhookname=$(basename $0)\nGIT_DIR=${GIT_DIR:-$(dirname $0)}\n\nfor hook in ${GIT_DIR}/hooks/${hookname}.d/*; do\ntest -x \"${hook}\" || continue\necho \"${data}\" | \"${hook}\"\nexitcodes=\"${exitcodes} $?\"\ndone\n\nfor i in ${exitcodes}; do\n[ ${i} -eq 0 ] || exit ${i}\ndone\n", setting.ScriptType),
|
||||
fmt.Sprintf("#!/usr/bin/env %s\nexitcodes=\"\"\nhookname=$(basename $0)\nGIT_DIR=${GIT_DIR:-$(dirname $0)}\n\nfor hook in ${GIT_DIR}/hooks/${hookname}.d/*; do\ntest -x \"${hook}\" || continue\n\"${hook}\" $1 $2 $3\nexitcodes=\"${exitcodes} $?\"\ndone\n\nfor i in ${exitcodes}; do\n[ ${i} -eq 0 ] || exit ${i}\ndone\n", setting.ScriptType),
|
||||
fmt.Sprintf("#!/usr/bin/env %s\ndata=$(cat)\nexitcodes=\"\"\nhookname=$(basename $0)\nGIT_DIR=${GIT_DIR:-$(dirname $0)}\n\nfor hook in ${GIT_DIR}/hooks/${hookname}.d/*; do\ntest -x \"${hook}\" || continue\necho \"${data}\" | \"${hook}\"\nexitcodes=\"${exitcodes} $?\"\ndone\n\nfor i in ${exitcodes}; do\n[ ${i} -eq 0 ] || exit ${i}\ndone\n", setting.ScriptType),
|
||||
}
|
||||
giteaHookTpls = []string{
|
||||
fmt.Sprintf("#!/usr/bin/env %s\n\"%s\" hook --config='%s' pre-receive\n", setting.ScriptType, setting.AppPath, setting.CustomConf),
|
||||
fmt.Sprintf("#!/usr/bin/env %s\n\"%s\" hook --config='%s' update $1 $2 $3\n", setting.ScriptType, setting.AppPath, setting.CustomConf),
|
||||
fmt.Sprintf("#!/usr/bin/env %s\n\"%s\" hook --config='%s' post-receive\n", setting.ScriptType, setting.AppPath, setting.CustomConf),
|
||||
}
|
||||
)
|
||||
|
||||
hookNames, hookTpls, giteaHookTpls := getHookTemplates()
|
||||
hookDir := filepath.Join(repoPath, "hooks")
|
||||
|
||||
for i, hookName := range hookNames {
|
||||
|
@ -1000,16 +1010,94 @@ func createDelegateHooks(repoPath string) (err error) {
|
|||
return fmt.Errorf("write old hook file '%s': %v", oldHookPath, err)
|
||||
}
|
||||
|
||||
if err = ensureExecutable(oldHookPath); err != nil {
|
||||
return fmt.Errorf("Unable to set %s executable. Error %v", oldHookPath, err)
|
||||
}
|
||||
|
||||
if err = os.Remove(newHookPath); err != nil && !os.IsNotExist(err) {
|
||||
return fmt.Errorf("unable to pre-remove new hook file '%s' prior to rewriting: %v", newHookPath, err)
|
||||
}
|
||||
if err = ioutil.WriteFile(newHookPath, []byte(giteaHookTpls[i]), 0777); err != nil {
|
||||
return fmt.Errorf("write new hook file '%s': %v", newHookPath, err)
|
||||
}
|
||||
|
||||
if err = ensureExecutable(newHookPath); err != nil {
|
||||
return fmt.Errorf("Unable to set %s executable. Error %v", oldHookPath, err)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
func checkExecutable(filename string) bool {
|
||||
fileInfo, err := os.Stat(filename)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
return (fileInfo.Mode() & 0100) > 0
|
||||
}
|
||||
|
||||
func ensureExecutable(filename string) error {
|
||||
fileInfo, err := os.Stat(filename)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if (fileInfo.Mode() & 0100) > 0 {
|
||||
return nil
|
||||
}
|
||||
mode := fileInfo.Mode() | 0100
|
||||
return os.Chmod(filename, mode)
|
||||
}
|
||||
|
||||
// CheckDelegateHooks checks the hooks scripts for the repo
|
||||
func CheckDelegateHooks(repoPath string) ([]string, error) {
|
||||
hookNames, hookTpls, giteaHookTpls := getHookTemplates()
|
||||
|
||||
hookDir := filepath.Join(repoPath, "hooks")
|
||||
results := make([]string, 0, 10)
|
||||
|
||||
for i, hookName := range hookNames {
|
||||
oldHookPath := filepath.Join(hookDir, hookName)
|
||||
newHookPath := filepath.Join(hookDir, hookName+".d", "gitea")
|
||||
|
||||
cont := false
|
||||
if !com.IsExist(oldHookPath) {
|
||||
results = append(results, fmt.Sprintf("old hook file %s does not exist", oldHookPath))
|
||||
cont = true
|
||||
}
|
||||
if !com.IsExist(oldHookPath + ".d") {
|
||||
results = append(results, fmt.Sprintf("hooks directory %s does not exist", oldHookPath+".d"))
|
||||
cont = true
|
||||
}
|
||||
if !com.IsExist(newHookPath) {
|
||||
results = append(results, fmt.Sprintf("new hook file %s does not exist", newHookPath))
|
||||
cont = true
|
||||
}
|
||||
if cont {
|
||||
continue
|
||||
}
|
||||
contents, err := ioutil.ReadFile(oldHookPath)
|
||||
if err != nil {
|
||||
return results, err
|
||||
}
|
||||
if string(contents) != hookTpls[i] {
|
||||
results = append(results, fmt.Sprintf("old hook file %s is out of date", oldHookPath))
|
||||
}
|
||||
if !checkExecutable(oldHookPath) {
|
||||
results = append(results, fmt.Sprintf("old hook file %s is not executable", oldHookPath))
|
||||
}
|
||||
contents, err = ioutil.ReadFile(newHookPath)
|
||||
if err != nil {
|
||||
return results, err
|
||||
}
|
||||
if string(contents) != giteaHookTpls[i] {
|
||||
results = append(results, fmt.Sprintf("new hook file %s is out of date", newHookPath))
|
||||
}
|
||||
if !checkExecutable(newHookPath) {
|
||||
results = append(results, fmt.Sprintf("new hook file %s is not executable", newHookPath))
|
||||
}
|
||||
}
|
||||
return results, nil
|
||||
}
|
||||
|
||||
// initRepoCommit temporarily changes with work directory.
|
||||
func initRepoCommit(tmpPath string, repo *Repository, u *User) (err error) {
|
||||
|
@ -1520,7 +1608,7 @@ func TransferOwnership(doer *User, newOwnerName string, repo *Repository) error
|
|||
}
|
||||
|
||||
if newOwner.IsOrganization() {
|
||||
if err := newOwner.GetTeams(); err != nil {
|
||||
if err := newOwner.getTeams(sess); err != nil {
|
||||
return fmt.Errorf("GetTeams: %v", err)
|
||||
}
|
||||
for _, t := range newOwner.Teams {
|
||||
|
@ -1848,6 +1936,18 @@ func DeleteRepository(doer *User, uid, repoID int64) error {
|
|||
return err
|
||||
}
|
||||
|
||||
// Dependencies for issues in this repository
|
||||
if _, err = sess.In("issue_id", deleteCond).
|
||||
Delete(&IssueDependency{}); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Delete dependencies for issues in other repositories
|
||||
if _, err = sess.In("dependency_id", deleteCond).
|
||||
Delete(&IssueDependency{}); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if _, err = sess.In("issue_id", deleteCond).
|
||||
Delete(&IssueUser{}); err != nil {
|
||||
return err
|
||||
|
@ -1868,6 +1968,11 @@ func DeleteRepository(doer *User, uid, repoID int64) error {
|
|||
return err
|
||||
}
|
||||
|
||||
if _, err = sess.In("issue_id", deleteCond).
|
||||
Delete(&TrackedTime{}); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
attachments = attachments[:0]
|
||||
if err = sess.Join("INNER", "issue", "issue.id = attachment.issue_id").
|
||||
Where("issue.repo_id = ?", repoID).
|
||||
|
@ -1902,6 +2007,12 @@ func DeleteRepository(doer *User, uid, repoID int64) error {
|
|||
return err
|
||||
}
|
||||
|
||||
if len(repo.Topics) > 0 {
|
||||
if err = removeTopicsFromRepo(sess, repo.ID); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// FIXME: Remove repository files should be executed after transaction succeed.
|
||||
repoPath := repo.repoPath(sess)
|
||||
removeAllWithNotice(sess, "Delete repository files", repoPath)
|
||||
|
|
|
@ -202,3 +202,23 @@ func (repo *Repository) getRepoTeams(e Engine) (teams []*Team, err error) {
func (repo *Repository) GetRepoTeams() ([]*Team, error) {
    return repo.getRepoTeams(x)
}

// IsOwnerMemberCollaborator checks if a provided user is the owner, a collaborator or a member of a team in a repository
func (repo *Repository) IsOwnerMemberCollaborator(userID int64) (bool, error) {
    if repo.OwnerID == userID {
        return true, nil
    }
    teamMember, err := x.Join("INNER", "team_repo", "team_repo.team_id = team_user.team_id").
        Join("INNER", "team_unit", "team_unit.team_id = team_user.team_id").
        Where("team_repo.repo_id = ?", repo.ID).
        And("team_unit.`type` = ?", UnitTypeCode).
        And("team_user.uid = ?", userID).Table("team_user").Exist(&TeamUser{})
    if err != nil {
        return false, err
    }
    if teamMember {
        return true, nil
    }

    return x.Get(&Collaboration{RepoID: repo.ID, UserID: userID})
}
@ -124,41 +124,43 @@ func generateRepoCommit(e Engine, repo, templateRepo, generateRepo *Repository,
|
|||
return fmt.Errorf("checkGiteaTemplate: %v", err)
|
||||
}
|
||||
|
||||
if err := os.Remove(gt.Path); err != nil {
|
||||
return fmt.Errorf("remove .giteatemplate: %v", err)
|
||||
}
|
||||
if gt != nil {
|
||||
if err := os.Remove(gt.Path); err != nil {
|
||||
return fmt.Errorf("remove .giteatemplate: %v", err)
|
||||
}
|
||||
|
||||
// Avoid walking tree if there are no globs
|
||||
if len(gt.Globs()) > 0 {
|
||||
tmpDirSlash := strings.TrimSuffix(filepath.ToSlash(tmpDir), "/") + "/"
|
||||
if err := filepath.Walk(tmpDirSlash, func(path string, info os.FileInfo, walkErr error) error {
|
||||
if walkErr != nil {
|
||||
return walkErr
|
||||
}
|
||||
|
||||
if info.IsDir() {
|
||||
return nil
|
||||
}
|
||||
|
||||
base := strings.TrimPrefix(filepath.ToSlash(path), tmpDirSlash)
|
||||
for _, g := range gt.Globs() {
|
||||
if g.Match(base) {
|
||||
content, err := ioutil.ReadFile(path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := ioutil.WriteFile(path,
|
||||
[]byte(generateExpansion(string(content), templateRepo, generateRepo)),
|
||||
0644); err != nil {
|
||||
return err
|
||||
}
|
||||
break
|
||||
// Avoid walking tree if there are no globs
|
||||
if len(gt.Globs()) > 0 {
|
||||
tmpDirSlash := strings.TrimSuffix(filepath.ToSlash(tmpDir), "/") + "/"
|
||||
if err := filepath.Walk(tmpDirSlash, func(path string, info os.FileInfo, walkErr error) error {
|
||||
if walkErr != nil {
|
||||
return walkErr
|
||||
}
|
||||
|
||||
if info.IsDir() {
|
||||
return nil
|
||||
}
|
||||
|
||||
base := strings.TrimPrefix(filepath.ToSlash(path), tmpDirSlash)
|
||||
for _, g := range gt.Globs() {
|
||||
if g.Match(base) {
|
||||
content, err := ioutil.ReadFile(path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := ioutil.WriteFile(path,
|
||||
[]byte(generateExpansion(string(content), templateRepo, generateRepo)),
|
||||
0644); err != nil {
|
||||
return err
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -144,6 +144,10 @@ type SearchRepoOptions struct {
|
|||
TopicOnly bool
|
||||
// include description in keyword search
|
||||
IncludeDescription bool
|
||||
// None -> include has milestones AND has no milestone
|
||||
// True -> include just has milestones
|
||||
// False -> include just has no milestone
|
||||
HasMilestones util.OptionalBool
|
||||
}
|
||||
|
||||
//SearchOrderBy is used to sort the result
|
||||
|
@ -171,12 +175,9 @@ const (
|
|||
SearchOrderByForksReverse SearchOrderBy = "num_forks DESC"
|
||||
)
|
||||
|
||||
// SearchRepository returns repositories based on search options,
|
||||
// SearchRepositoryCondition returns repositories based on search options,
|
||||
// it returns results in given range and number of total results.
|
||||
func SearchRepository(opts *SearchRepoOptions) (RepositoryList, int64, error) {
|
||||
if opts.Page <= 0 {
|
||||
opts.Page = 1
|
||||
}
|
||||
func SearchRepositoryCondition(opts *SearchRepoOptions) builder.Cond {
|
||||
var cond = builder.NewCond()
|
||||
|
||||
if opts.Private {
|
||||
|
@ -213,14 +214,35 @@ func SearchRepository(opts *SearchRepoOptions) (RepositoryList, int64, error) {
|
|||
}
|
||||
|
||||
if opts.Collaborate != util.OptionalBoolFalse {
|
||||
// A Collaboration is:
|
||||
collaborateCond := builder.And(
|
||||
// 1. Repository we don't own
|
||||
builder.Neq{"owner_id": opts.OwnerID},
|
||||
// 2. But we can see because of:
|
||||
builder.Or(
|
||||
builder.Expr("repository.id IN (SELECT repo_id FROM `access` WHERE access.user_id = ?)", opts.OwnerID),
|
||||
builder.In("id", builder.Select("`team_repo`.repo_id").
|
||||
// A. We have access
|
||||
builder.In("`repository`.id",
|
||||
builder.Select("`access`.repo_id").
|
||||
From("access").
|
||||
Where(builder.Eq{"`access`.user_id": opts.OwnerID})),
|
||||
// B. We are in a team for
|
||||
builder.In("`repository`.id", builder.Select("`team_repo`.repo_id").
|
||||
From("team_repo").
|
||||
Where(builder.Eq{"`team_user`.uid": opts.OwnerID}).
|
||||
Join("INNER", "team_user", "`team_user`.team_id = `team_repo`.team_id"))),
|
||||
builder.Neq{"owner_id": opts.OwnerID})
|
||||
Join("INNER", "team_user", "`team_user`.team_id = `team_repo`.team_id")),
|
||||
// C. Public repositories in private organizations that we are member of
|
||||
builder.And(
|
||||
builder.Eq{"`repository`.is_private": false},
|
||||
builder.In("`repository`.owner_id",
|
||||
builder.Select("`org_user`.org_id").
|
||||
From("org_user").
|
||||
Join("INNER", "`user`", "`user`.id = `org_user`.org_id").
|
||||
Where(builder.Eq{
|
||||
"`org_user`.uid": opts.OwnerID,
|
||||
"`user`.type": UserTypeOrganization,
|
||||
"`user`.visibility": structs.VisibleTypePrivate,
|
||||
})))),
|
||||
)
|
||||
if !opts.Private {
|
||||
collaborateCond = collaborateCond.And(builder.Expr("owner_id NOT IN (SELECT org_id FROM org_user WHERE org_user.uid = ? AND org_user.is_public = ?)", opts.OwnerID, false))
|
||||
}
|
||||
|
@ -276,6 +298,29 @@ func SearchRepository(opts *SearchRepoOptions) (RepositoryList, int64, error) {
|
|||
cond = cond.And(builder.Eq{"is_mirror": opts.Mirror == util.OptionalBoolTrue})
|
||||
}
|
||||
|
||||
switch opts.HasMilestones {
|
||||
case util.OptionalBoolTrue:
|
||||
cond = cond.And(builder.Gt{"num_milestones": 0})
|
||||
case util.OptionalBoolFalse:
|
||||
cond = cond.And(builder.Eq{"num_milestones": 0}.Or(builder.IsNull{"num_milestones"}))
|
||||
}
|
||||
|
||||
return cond
|
||||
}
|
||||
|
||||
// SearchRepository returns repositories based on search options,
|
||||
// it returns results in given range and number of total results.
|
||||
func SearchRepository(opts *SearchRepoOptions) (RepositoryList, int64, error) {
|
||||
cond := SearchRepositoryCondition(opts)
|
||||
return SearchRepositoryByCondition(opts, cond)
|
||||
}
|
||||
|
||||
// SearchRepositoryByCondition search repositories by condition
|
||||
func SearchRepositoryByCondition(opts *SearchRepoOptions, cond builder.Cond) (RepositoryList, int64, error) {
|
||||
if opts.Page <= 0 {
|
||||
opts.Page = 1
|
||||
}
|
||||
|
||||
if len(opts.OrderBy) == 0 {
|
||||
opts.OrderBy = SearchOrderByAlphabetically
|
||||
}
|
||||
|
@ -296,11 +341,11 @@ func SearchRepository(opts *SearchRepoOptions) (RepositoryList, int64, error) {
|
|||
}
|
||||
|
||||
repos := make(RepositoryList, 0, opts.PageSize)
|
||||
if err = sess.
|
||||
Where(cond).
|
||||
OrderBy(opts.OrderBy.String()).
|
||||
Limit(opts.PageSize, (opts.Page-1)*opts.PageSize).
|
||||
Find(&repos); err != nil {
|
||||
sess.Where(cond).OrderBy(opts.OrderBy.String())
|
||||
if opts.PageSize > 0 {
|
||||
sess.Limit(opts.PageSize, (opts.Page-1)*opts.PageSize)
|
||||
}
|
||||
if err = sess.Find(&repos); err != nil {
|
||||
return nil, 0, fmt.Errorf("Repo: %v", err)
|
||||
}
|
||||
|
||||
|
@ -315,31 +360,40 @@ func SearchRepository(opts *SearchRepoOptions) (RepositoryList, int64, error) {
|
|||
|
||||
// accessibleRepositoryCondition takes a user a returns a condition for checking if a repository is accessible
|
||||
func accessibleRepositoryCondition(userID int64) builder.Cond {
|
||||
if userID <= 0 {
|
||||
// Public repositories that are not in private or limited organizations
|
||||
return builder.And(
builder.Eq{"`repository`.is_private": false},
builder.NotIn("`repository`.owner_id",
builder.Select("id").From("`user`").Where(builder.Eq{"type": UserTypeOrganization}).And(builder.Neq{"visibility": structs.VisibleTypePublic})))
}

return builder.Or(
// 1. Be able to see all non-private repositories that either:
// 1. All public repositories that are not in private organizations
builder.And(
builder.Eq{"`repository`.is_private": false},
builder.Or(
// A. Aren't in organisations __OR__
builder.NotIn("`repository`.owner_id", builder.Select("id").From("`user`").Where(builder.Eq{"type": UserTypeOrganization})),
// B. Isn't a private organisation. (Limited is OK because we're logged in)
builder.NotIn("`repository`.owner_id", builder.Select("id").From("`user`").Where(builder.Eq{"visibility": structs.VisibleTypePrivate}))),
),
// 2. Be able to see all repositories that we have access to
builder.Or(
builder.In("`repository`.id", builder.Select("repo_id").
From("`access`").
Where(builder.And(
builder.Eq{"user_id": userID},
builder.Gt{"mode": int(AccessModeNone)}))),
builder.In("`repository`.id", builder.Select("id").
From("`repository`").
Where(builder.Eq{"owner_id": userID}))),
// 3. Be able to see all repositories that we are in a team
builder.NotIn("`repository`.owner_id",
builder.Select("id").From("`user`").Where(builder.Eq{"type": UserTypeOrganization}).And(builder.Eq{"visibility": structs.VisibleTypePrivate}))),
// 2. Be able to see all repositories that we own
builder.Eq{"`repository`.owner_id": userID},
// 3. Be able to see all repositories that we have access to
builder.In("`repository`.id", builder.Select("repo_id").
From("`access`").
Where(builder.And(
builder.Eq{"user_id": userID},
builder.Gt{"mode": int(AccessModeNone)}))),
// 4. Be able to see all repositories that we are in a team
builder.In("`repository`.id", builder.Select("`team_repo`.repo_id").
From("team_repo").
Where(builder.Eq{"`team_user`.uid": userID}).
Join("INNER", "team_user", "`team_user`.team_id = `team_repo`.team_id")))
Join("INNER", "team_user", "`team_user`.team_id = `team_repo`.team_id")),
// 5. Be able to see all public repos in private organizations that we are an org_user of
builder.And(builder.Eq{"`repository`.is_private": false},
builder.In("`repository`.owner_id",
builder.Select("`org_user`.org_id").
From("org_user").
Where(builder.Eq{"`org_user`.uid": userID}))),
)
}

// SearchRepositoryByName takes keyword and part of repository name to search,

@@ -349,6 +403,12 @@ func SearchRepositoryByName(opts *SearchRepoOptions) (RepositoryList, int64, err
return SearchRepository(opts)
}

// AccessibleRepoIDsQuery queries accessible repository ids. Usable as a subquery wherever repo ids need to be filtered.
func AccessibleRepoIDsQuery(userID int64) *builder.Builder {
// NB: Please note this code needs to still work if user is nil
return builder.Select("id").From("repository").Where(accessibleRepositoryCondition(userID))
}

// FindUserAccessibleRepoIDs find all accessible repositories' ID by user's id
func FindUserAccessibleRepoIDs(userID int64) ([]int64, error) {
var accessCond builder.Cond = builder.Eq{"is_private": false}
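Editor's note: AccessibleRepoIDsQuery above is meant to be embedded as a subquery wherever a repo-id column must be restricted to what a user may see. A minimal sketch, assuming it is called from the same models package; the helper name is illustrative and not part of this diff:

// visibleRepoCond is an illustrative helper, not part of the diff: it restricts
// a repository id column to the repositories visible to the given user.
func visibleRepoCond(userID int64) builder.Cond {
	return builder.In("`repository`.id", AccessibleRepoIDsQuery(userID))
}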
@@ -15,6 +15,7 @@ import (
"encoding/pem"
"errors"
"fmt"
"io"
"io/ioutil"
"math/big"
"os"

@@ -687,14 +688,29 @@ func rewriteAllPublicKeys(e Engine) error {
}
}

err = e.Iterate(new(PublicKey), func(idx int, bean interface{}) (err error) {
_, err = t.WriteString((bean.(*PublicKey)).AuthorizedString())
if err := regeneratePublicKeys(e, t); err != nil {
return err
}

t.Close()
return os.Rename(tmpPath, fPath)
}

// RegeneratePublicKeys regenerates the authorized_keys file
func RegeneratePublicKeys(t io.Writer) error {
return regeneratePublicKeys(x, t)
}

func regeneratePublicKeys(e Engine, t io.Writer) error {
err := e.Iterate(new(PublicKey), func(idx int, bean interface{}) (err error) {
_, err = t.Write([]byte((bean.(*PublicKey)).AuthorizedString()))
return err
})
if err != nil {
return err
}

fPath := filepath.Join(setting.SSH.RootPath, "authorized_keys")
if com.IsExist(fPath) {
f, err := os.Open(fPath)
if err != nil {

@@ -707,7 +723,7 @@ func rewriteAllPublicKeys(e Engine) error {
scanner.Scan()
continue
}
_, err = t.WriteString(line + "\n")
_, err = t.Write([]byte(line + "\n"))
if err != nil {
f.Close()
return err

@@ -715,9 +731,7 @@ func rewriteAllPublicKeys(e Engine) error {
}
f.Close()
}

t.Close()
return os.Rename(tmpPath, fPath)
return nil
}

// ________ .__ ____ __.
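Editor's note: because the new RegeneratePublicKeys only needs an io.Writer, the authorized_keys content can be regenerated into any destination, not just the temporary file. A minimal sketch, assuming the models package with an initialised engine and a "bytes" import; the helper name is illustrative:

// dumpAuthorizedKeys is an illustrative helper, not part of the diff.
func dumpAuthorizedKeys() (string, error) {
	var buf bytes.Buffer
	// Streams every PublicKey's AuthorizedString into the buffer.
	if err := RegeneratePublicKeys(&buf); err != nil {
		return "", err
	}
	return buf.String(), nil
}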
@@ -129,7 +129,7 @@ func addTopicByNameToRepo(e Engine, repoID int64, topicName string) (*Topic, err
}

// removeTopicFromRepo remove a topic from a repo and decrements the topic repo count
func removeTopicFromRepo(repoID int64, topic *Topic, e Engine) error {
func removeTopicFromRepo(e Engine, repoID int64, topic *Topic) error {
topic.RepoCount--
if _, err := e.ID(topic.ID).Cols("repo_count").Update(topic); err != nil {
return err

@@ -145,6 +145,24 @@ func removeTopicFromRepo(repoID int64, topic *Topic, e Engine) error {
return nil
}

// removeTopicsFromRepo remove all topics from the repo and decrements respective topics repo count
func removeTopicsFromRepo(e Engine, repoID int64) error {
_, err := e.Where(
builder.In("id",
builder.Select("topic_id").From("repo_topic").Where(builder.Eq{"repo_id": repoID}),
),
).Cols("repo_count").SetExpr("repo_count", "repo_count-1").Update(&Topic{})
if err != nil {
return err
}

if _, err = e.Delete(&RepoTopic{RepoID: repoID}); err != nil {
return err
}

return nil
}

// FindTopicOptions represents the options when fdin topics
type FindTopicOptions struct {
RepoID int64

@@ -217,7 +235,7 @@ func DeleteTopic(repoID int64, topicName string) (*Topic, error) {
return nil, nil
}

err = removeTopicFromRepo(repoID, topic, x)
err = removeTopicFromRepo(x, repoID, topic)

return topic, err
}

@@ -278,7 +296,7 @@ func SaveTopics(repoID int64, topicNames ...string) error {
}

for _, topic := range removeTopics {
err := removeTopicFromRepo(repoID, topic, sess)
err := removeTopicFromRepo(sess, repoID, topic)
if err != nil {
return err
}
@@ -142,8 +142,8 @@ func UpdateTwoFactor(t *TwoFactor) error {
// GetTwoFactorByUID returns the two-factor authentication token associated with
// the user, if any.
func GetTwoFactorByUID(uid int64) (*TwoFactor, error) {
twofa := &TwoFactor{UID: uid}
has, err := x.Get(twofa)
twofa := &TwoFactor{}
has, err := x.Where("uid=?", uid).Get(twofa)
if err != nil {
return nil, err
} else if !has {
@@ -84,7 +84,7 @@ func MainTest(m *testing.M, pathToGiteaRoot string) {

func createTestEngine(fixturesDir string) error {
var err error
x, err = xorm.NewEngine("sqlite3", "file::memory:?cache=shared")
x, err = xorm.NewEngine("sqlite3", "file::memory:?cache=shared&_txlock=immediate")
if err != nil {
return err
}
@@ -76,8 +76,8 @@ func NewUpload(name string, buf []byte, file multipart.File) (_ *Upload, err err

// GetUploadByUUID returns the Upload by UUID
func GetUploadByUUID(uuid string) (*Upload, error) {
upload := &Upload{UUID: uuid}
has, err := x.Get(upload)
upload := &Upload{}
has, err := x.Where("uuid=?", uuid).Get(upload)
if err != nil {
return nil, err
} else if !has {
@@ -18,6 +18,7 @@ import (
"image/png"
"os"
"path/filepath"
"regexp"
"strconv"
"strings"
"time"

@@ -87,6 +88,9 @@ var (

// ErrUnsupportedLoginType login source is unknown error
ErrUnsupportedLoginType = errors.New("Login source is unknown")

// Characters prohibited in a user name (anything except A-Za-z0-9_.-)
alphaDashDotPattern = regexp.MustCompile(`[^\w-\.]`)
)

// User represents the object of individual and member of organization.

@@ -503,7 +507,7 @@ func (u *User) ValidatePassword(passwd string) bool {

// IsPasswordSet checks if the password is set or left empty
func (u *User) IsPasswordSet() bool {
return len(u.Passwd) > 0
return !u.ValidatePassword("")
}

// UploadAvatar saves custom avatar for user.

@@ -708,9 +712,11 @@ func (u *User) DisplayName() string {
// GetDisplayName returns full name if it's not empty and DEFAULT_SHOW_FULL_NAME is set,
// returns username otherwise.
func (u *User) GetDisplayName() string {
trimmed := strings.TrimSpace(u.FullName)
if len(trimmed) > 0 && setting.UI.DefaultShowFullName {
return trimmed
if setting.UI.DefaultShowFullName {
trimmed := strings.TrimSpace(u.FullName)
if len(trimmed) > 0 {
return trimmed
}
}
return u.Name
}

@@ -819,7 +825,9 @@ var (
"issues",
"js",
"less",
"manifest.json",
"metrics",
"milestones",
"new",
"notifications",
"org",

@@ -868,6 +876,11 @@ func isUsableName(names, patterns []string, name string) error {

// IsUsableUsername returns an error when a username is reserved
func IsUsableUsername(name string) error {
// Validate username make sure it satisfies requirement.
if alphaDashDotPattern.MatchString(name) {
// Note: usually this error is normally caught up earlier in the UI
return ErrNameCharsNotAllowed{Name: name}
}
return isUsableName(reservedUsernames, reservedUserPatterns, name)
}

@@ -987,7 +1000,7 @@ func VerifyActiveEmailCode(code, email string) *EmailAddress {
data := com.ToStr(user.ID) + email + user.LowerName + user.Passwd + user.Rands

if base.VerifyTimeLimitCode(data, minutes, prefix) {
emailAddress := &EmailAddress{Email: email}
emailAddress := &EmailAddress{UID: user.ID, Email: email}
if has, _ := x.Get(emailAddress); has {
return emailAddress
}

@@ -1444,8 +1457,8 @@ func GetUserByEmail(email string) (*User, error) {
// Finally, if email address is the protected email address:
if strings.HasSuffix(email, fmt.Sprintf("@%s", setting.Service.NoReplyAddress)) {
username := strings.TrimSuffix(email, fmt.Sprintf("@%s", setting.Service.NoReplyAddress))
user := &User{LowerName: username}
has, err := x.Get(user)
user := &User{}
has, err := x.Where("lower_name=?", username).Get(user)
if err != nil {
return nil, err
}

@@ -1760,6 +1773,15 @@ func SyncExternalUsers(ctx context.Context) {
continue
}

if len(sr) == 0 {
if !s.LDAP().AllowDeactivateAll {
log.Error("LDAP search found no entries but did not report an error. Refusing to deactivate all users")
continue
} else {
log.Warn("LDAP search found no entries but did not report an error. All users will be deactivated as per settings")
}
}

for _, su := range sr {
select {
case <-ctx.Done():
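Editor's note: the new alphaDashDotPattern rejects any character outside A-Za-z0-9_.- . A standalone sketch of the check, with the pattern copied from the hunk above; the example names are illustrative:

package main

import (
	"fmt"
	"regexp"
)

var alphaDashDotPattern = regexp.MustCompile(`[^\w-\.]`)

func main() {
	for _, name := range []string{"jane.doe", "jane_doe-42", "jane doe", "jane@doe"} {
		// MatchString is true when the name contains a prohibited character.
		fmt.Printf("%q contains prohibited characters: %v\n", name, alphaDashDotPattern.MatchString(name))
	}
}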
@@ -1,4 +1,5 @@
// Copyright 2016 The Gogs Authors. All rights reserved.
// Copyright 2020 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.

@@ -8,6 +9,12 @@ import (
"errors"
"fmt"
"strings"

"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"

"xorm.io/builder"
)

var (

@@ -54,19 +61,72 @@ func GetEmailAddresses(uid int64) ([]*EmailAddress, error) {
if !isPrimaryFound {
emails = append(emails, &EmailAddress{
Email: u.Email,
IsActivated: true,
IsActivated: u.IsActive,
IsPrimary: true,
})
}
return emails, nil
}

// GetEmailAddressByID gets a user's email address by ID
func GetEmailAddressByID(uid, id int64) (*EmailAddress, error) {
// User ID is required for security reasons
email := &EmailAddress{UID: uid}
if has, err := x.ID(id).Get(email); err != nil {
return nil, err
} else if !has {
return nil, nil
}
return email, nil
}

func isEmailActive(e Engine, email string, userID, emailID int64) (bool, error) {
if len(email) == 0 {
return true, nil
}

// Can't filter by boolean field unless it's explicit
cond := builder.NewCond()
cond = cond.And(builder.Eq{"email": email}, builder.Neq{"id": emailID})
if setting.Service.RegisterEmailConfirm {
// Inactive (unvalidated) addresses don't count as active if email validation is required
cond = cond.And(builder.Eq{"is_activated": true})
}

em := EmailAddress{}

if has, err := e.Where(cond).Get(&em); has || err != nil {
if has {
log.Info("isEmailActive('%s',%d,%d) found duplicate in email ID %d", email, userID, emailID, em.ID)
}
return has, err
}

// Can't filter by boolean field unless it's explicit
cond = builder.NewCond()
cond = cond.And(builder.Eq{"email": email}, builder.Neq{"id": userID})
if setting.Service.RegisterEmailConfirm {
cond = cond.And(builder.Eq{"is_active": true})
}

us := User{}

if has, err := e.Where(cond).Get(&us); has || err != nil {
if has {
log.Info("isEmailActive('%s',%d,%d) found duplicate in user ID %d", email, userID, emailID, us.ID)
}
return has, err
}

return false, nil
}

func isEmailUsed(e Engine, email string) (bool, error) {
if len(email) == 0 {
return true, nil
}

return e.Get(&EmailAddress{Email: email})
return e.Where("email=?", email).Get(&EmailAddress{})
}

// IsEmailUsed returns true if the email has been used.

@@ -118,31 +178,30 @@ func AddEmailAddresses(emails []*EmailAddress) error {

// Activate activates the email address to given user.
func (email *EmailAddress) Activate() error {
user, err := GetUserByID(email.UID)
sess := x.NewSession()
defer sess.Close()
if err := sess.Begin(); err != nil {
return err
}
if err := email.updateActivation(sess, true); err != nil {
return err
}
return sess.Commit()
}

func (email *EmailAddress) updateActivation(e Engine, activate bool) error {
user, err := getUserByID(e, email.UID)
if err != nil {
return err
}
if user.Rands, err = GetUserSalt(); err != nil {
return err
}

sess := x.NewSession()
defer sess.Close()
if err = sess.Begin(); err != nil {
email.IsActivated = activate
if _, err := e.ID(email.ID).Cols("is_activated").Update(email); err != nil {
return err
}

email.IsActivated = true
if _, err := sess.
ID(email.ID).
Cols("is_activated").
Update(email); err != nil {
return err
} else if err = updateUserCols(sess, user, "rands"); err != nil {
return err
}

return sess.Commit()
return updateUserCols(e, user, "rands")
}

// DeleteEmailAddress deletes an email address of given user.

@@ -192,8 +251,8 @@ func MakeEmailPrimary(email *EmailAddress) error {
return ErrEmailNotActivated
}

user := &User{ID: email.UID}
has, err = x.Get(user)
user := &User{}
has, err = x.ID(email.UID).Get(user)
if err != nil {
return err
} else if !has {

@@ -201,7 +260,7 @@ func MakeEmailPrimary(email *EmailAddress) error {
}

// Make sure the former primary email doesn't disappear.
formerPrimaryEmail := &EmailAddress{Email: user.Email}
formerPrimaryEmail := &EmailAddress{UID: user.ID, Email: user.Email}
has, err = x.Get(formerPrimaryEmail)
if err != nil {
return err

@@ -228,3 +287,199 @@ func MakeEmailPrimary(email *EmailAddress) error {

return sess.Commit()
}

// SearchEmailOrderBy is used to sort the results from SearchEmails()
type SearchEmailOrderBy string

func (s SearchEmailOrderBy) String() string {
return string(s)
}

// Strings for sorting result
const (
SearchEmailOrderByEmail SearchEmailOrderBy = "emails.email ASC, is_primary DESC, sortid ASC"
SearchEmailOrderByEmailReverse SearchEmailOrderBy = "emails.email DESC, is_primary ASC, sortid DESC"
SearchEmailOrderByName SearchEmailOrderBy = "`user`.lower_name ASC, is_primary DESC, sortid ASC"
SearchEmailOrderByNameReverse SearchEmailOrderBy = "`user`.lower_name DESC, is_primary ASC, sortid DESC"
)

// SearchEmailOptions are options to search e-mail addresses for the admin panel
type SearchEmailOptions struct {
Page int
PageSize int // Can be smaller than or equal to setting.UI.ExplorePagingNum
Keyword string
SortType SearchEmailOrderBy
IsPrimary util.OptionalBool
IsActivated util.OptionalBool
}

// SearchEmailResult is an e-mail address found in the user or email_address table
type SearchEmailResult struct {
UID int64
Email string
IsActivated bool
IsPrimary bool
// From User
Name string
FullName string
}

// SearchEmails takes options i.e. keyword and part of email name to search,
// it returns results in given range and number of total results.
func SearchEmails(opts *SearchEmailOptions) ([]*SearchEmailResult, int64, error) {
// Unfortunately, UNION support for SQLite in xorm is currently broken, so we must
// build the SQL ourselves.
where := make([]string, 0, 5)
args := make([]interface{}, 0, 5)

emailsSQL := "(SELECT id as sortid, uid, email, is_activated, 0 as is_primary " +
"FROM email_address " +
"UNION ALL " +
"SELECT id as sortid, id AS uid, email, is_active AS is_activated, 1 as is_primary " +
"FROM `user` " +
"WHERE type = ?) AS emails"
args = append(args, UserTypeIndividual)

if len(opts.Keyword) > 0 {
// Note: % can be injected in the Keyword parameter, but it won't do any harm.
where = append(where, "(lower(`user`.full_name) LIKE ? OR `user`.lower_name LIKE ? OR emails.email LIKE ?)")
likeStr := "%" + strings.ToLower(opts.Keyword) + "%"
args = append(args, likeStr)
args = append(args, likeStr)
args = append(args, likeStr)
}

switch {
case opts.IsPrimary.IsTrue():
where = append(where, "emails.is_primary = ?")
args = append(args, true)
case opts.IsPrimary.IsFalse():
where = append(where, "emails.is_primary = ?")
args = append(args, false)
}

switch {
case opts.IsActivated.IsTrue():
where = append(where, "emails.is_activated = ?")
args = append(args, true)
case opts.IsActivated.IsFalse():
where = append(where, "emails.is_activated = ?")
args = append(args, false)
}

var whereStr string
if len(where) > 0 {
whereStr = "WHERE " + strings.Join(where, " AND ")
}

joinSQL := "FROM " + emailsSQL + " INNER JOIN `user` ON `user`.id = emails.uid " + whereStr

count, err := x.SQL("SELECT count(*) "+joinSQL, args...).Count()
if err != nil {
return nil, 0, fmt.Errorf("Count: %v", err)
}

orderby := opts.SortType.String()
if orderby == "" {
orderby = SearchEmailOrderByEmail.String()
}

querySQL := "SELECT emails.uid, emails.email, emails.is_activated, emails.is_primary, " +
"`user`.name, `user`.full_name " + joinSQL + " ORDER BY " + orderby

if opts.PageSize == 0 || opts.PageSize > setting.UI.ExplorePagingNum {
opts.PageSize = setting.UI.ExplorePagingNum
}
if opts.Page <= 0 {
opts.Page = 1
}

rows, err := x.SQL(querySQL, args...).Rows(new(SearchEmailResult))
if err != nil {
return nil, 0, fmt.Errorf("Emails: %v", err)
}

// Page manually because xorm can't handle Limit() with raw SQL
defer rows.Close()

emails := make([]*SearchEmailResult, 0, opts.PageSize)
skip := (opts.Page - 1) * opts.PageSize

for rows.Next() {
var email SearchEmailResult
if err := rows.Scan(&email); err != nil {
return nil, 0, err
}
if skip > 0 {
skip--
continue
}
emails = append(emails, &email)
if len(emails) == opts.PageSize {
break
}
}

return emails, count, err
}

// ActivateUserEmail will change the activated state of an email address,
// either primary (in the user table) or secondary (in the email_address table)
func ActivateUserEmail(userID int64, email string, primary, activate bool) (err error) {
sess := x.NewSession()
defer sess.Close()
if err = sess.Begin(); err != nil {
return err
}
if primary {
// Activate/deactivate a user's primary email address
user := User{ID: userID, Email: email}
if has, err := sess.Get(&user); err != nil {
return err
} else if !has {
return fmt.Errorf("no such user: %d (%s)", userID, email)
}
if user.IsActive == activate {
// Already in the desired state; no action
return nil
}
if activate {
if used, err := isEmailActive(sess, email, userID, 0); err != nil {
return fmt.Errorf("isEmailActive(): %v", err)
} else if used {
return ErrEmailAlreadyUsed{Email: email}
}
}
user.IsActive = activate
if user.Rands, err = GetUserSalt(); err != nil {
return fmt.Errorf("generate salt: %v", err)
}
if err = updateUserCols(sess, &user, "is_active", "rands"); err != nil {
return fmt.Errorf("updateUserCols(): %v", err)
}
} else {
// Activate/deactivate a user's secondary email address
// First check if there's another user active with the same address
addr := EmailAddress{UID: userID, Email: email}
if has, err := sess.Get(&addr); err != nil {
return err
} else if !has {
return fmt.Errorf("no such email: %d (%s)", userID, email)
}
if addr.IsActivated == activate {
// Already in the desired state; no action
return nil
}
if activate {
if used, err := isEmailActive(sess, email, 0, addr.ID); err != nil {
return fmt.Errorf("isEmailActive(): %v", err)
} else if used {
return ErrEmailAlreadyUsed{Email: email}
}
}
if err = addr.updateActivation(sess, activate); err != nil {
return fmt.Errorf("updateActivation(): %v", err)
}
}
return sess.Commit()
}
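Editor's note: a sketch of how the admin panel could drive SearchEmails; all option values and the wrapper function are illustrative and not part of this diff:

// listActivatedPrimaryEmails is an illustrative helper, not part of the diff.
func listActivatedPrimaryEmails() ([]*SearchEmailResult, int64, error) {
	return SearchEmails(&SearchEmailOptions{
		Keyword:     "gitea",
		IsPrimary:   util.OptionalBoolTrue,
		IsActivated: util.OptionalBoolTrue,
		SortType:    SearchEmailOrderByEmail,
		Page:        1,
		PageSize:    50, // SearchEmails caps this at setting.UI.ExplorePagingNum
	})
}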
@@ -7,6 +7,8 @@ package models
import (
"testing"

"code.gitea.io/gitea/modules/util"

"github.com/stretchr/testify/assert"
)

@@ -169,3 +171,65 @@ func TestActivate(t *testing.T) {
assert.True(t, emails[2].IsActivated)
assert.True(t, emails[2].IsPrimary)
}

func TestListEmails(t *testing.T) {
assert.NoError(t, PrepareTestDatabase())

// Must find all users and their emails
opts := &SearchEmailOptions{}
emails, count, err := SearchEmails(opts)
assert.NoError(t, err)
assert.NotEqual(t, int64(0), count)
assert.True(t, count > 5)

contains := func(match func(s *SearchEmailResult) bool) bool {
for _, v := range emails {
if match(v) {
return true
}
}
return false
}

assert.True(t, contains(func(s *SearchEmailResult) bool { return s.UID == 18 }))
// 'user3' is an organization
assert.False(t, contains(func(s *SearchEmailResult) bool { return s.UID == 3 }))

// Must find no records
opts = &SearchEmailOptions{Keyword: "NOTFOUND"}
emails, count, err = SearchEmails(opts)
assert.NoError(t, err)
assert.Equal(t, int64(0), count)

// Must find users 'user2', 'user28', etc.
opts = &SearchEmailOptions{Keyword: "user2"}
emails, count, err = SearchEmails(opts)
assert.NoError(t, err)
assert.NotEqual(t, int64(0), count)
assert.True(t, contains(func(s *SearchEmailResult) bool { return s.UID == 2 }))
assert.True(t, contains(func(s *SearchEmailResult) bool { return s.UID == 27 }))

// Must find only primary addresses (i.e. from the `user` table)
opts = &SearchEmailOptions{IsPrimary: util.OptionalBoolTrue}
emails, count, err = SearchEmails(opts)
assert.NoError(t, err)
assert.True(t, contains(func(s *SearchEmailResult) bool { return s.IsPrimary }))
assert.False(t, contains(func(s *SearchEmailResult) bool { return !s.IsPrimary }))

// Must find only inactive addresses (i.e. not validated)
opts = &SearchEmailOptions{IsActivated: util.OptionalBoolFalse}
emails, count, err = SearchEmails(opts)
assert.NoError(t, err)
assert.True(t, contains(func(s *SearchEmailResult) bool { return !s.IsActivated }))
assert.False(t, contains(func(s *SearchEmailResult) bool { return s.IsActivated }))

// Must find more than one page, but retrieve only one
opts = &SearchEmailOptions{
PageSize: 5,
Page: 1,
}
emails, count, err = SearchEmails(opts)
assert.NoError(t, err)
assert.Equal(t, 5, len(emails))
assert.True(t, count > int64(len(emails)))
}
@@ -111,8 +111,8 @@ func GetUserByOpenID(uri string) (*User, error) {
log.Trace("Normalized OpenID URI: " + uri)

// Otherwise, check in openid table
oid := &UserOpenID{URI: uri}
has, err := x.Get(oid)
oid := &UserOpenID{}
has, err := x.Where("uri=?", uri).Get(oid)
if err != nil {
return nil, err
}
@@ -47,3 +47,13 @@ type AdminEditUserForm struct {
func (f *AdminEditUserForm) Validate(ctx *macaron.Context, errs binding.Errors) binding.Errors {
return validate(errs, ctx.Data, f, ctx.Locale)
}

// AdminDashboardForm form for admin dashboard operations
type AdminDashboardForm struct {
Op int `binding:"required"`
}

// Validate validates form fields
func (f *AdminDashboardForm) Validate(ctx *macaron.Context, errs binding.Errors) binding.Errors {
return validate(errs, ctx.Data, f, ctx.Locale)
}
@@ -30,6 +30,7 @@ type AuthenticationForm struct {
SearchPageSize int
Filter string
AdminFilter string
AllowDeactivateAll bool
IsActive bool
IsSyncEnabled bool
SMTPAuth string
@@ -47,6 +47,7 @@ type Source struct {
Filter string // Query filter to validate entry
AdminFilter string // Query filter to check if user is admin
Enabled bool // if this source is disabled
AllowDeactivateAll bool // Allow an empty search response to deactivate all users from this source
}

// SearchResult : user data
@@ -13,7 +13,7 @@ import (
)

// Auth pam auth service
func Auth(serviceName, userName, passwd string) error {
func Auth(serviceName, userName, passwd string) (string, error) {
t, err := pam.StartFunc(serviceName, userName, func(s pam.Style, msg string) (string, error) {
switch s {
case pam.PromptEchoOff:

@@ -25,12 +25,14 @@ func Auth(serviceName, userName, passwd string) error {
})

if err != nil {
return err
return "", err
}

if err = t.Authenticate(0); err != nil {
return err
return "", err
}

return nil
// PAM login names might suffer transformations in the PAM stack.
// We should take whatever the PAM stack returns for it.
return t.GetItem(pam.User)
}
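Editor's note: callers of pam.Auth now get the login name back from the PAM stack and should continue with that name rather than the one the user typed. A minimal sketch of an adjusted call site; the "gitea" service name and the wrapper function are assumptions for illustration only:

// authenticateViaPAM is an illustrative wrapper, not part of the diff.
func authenticateViaPAM(login, password string) (string, error) {
	username, err := pam.Auth("gitea", login, password)
	if err != nil {
		return "", err
	}
	// The PAM stack may have canonicalised the login; use what it returned.
	return username, nil
}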
@@ -11,6 +11,6 @@ import (
)

// Auth not supported lack of pam tag
func Auth(serviceName, userName, passwd string) error {
return errors.New("PAM not supported")
func Auth(serviceName, userName, passwd string) (string, error) {
return "", errors.New("PAM not supported")
}
@@ -7,6 +7,7 @@ package context

import (
"fmt"
"net/http"
"net/url"
"strings"

@@ -64,18 +65,18 @@ type APINotFound struct{}
// swagger:response redirect
type APIRedirect struct{}

// Error responses error message to client with given message.
// Error responds with an error message to client with given obj as the message.
// If status is 500, also it prints error to log.
func (ctx *APIContext) Error(status int, title string, obj interface{}) {
var message string
if err, ok := obj.(error); ok {
message = err.Error()
} else {
message = obj.(string)
message = fmt.Sprintf("%s", obj)
}

if status == 500 {
log.Error("%s: %s", title, message)
if status == http.StatusInternalServerError {
log.ErrorWithSkip(1, "%s: %s", title, message)
}

ctx.JSON(status, APIError{

@@ -84,6 +85,22 @@ func (ctx *APIContext) Error(status int, title string, obj interface{}) {
})
}

// InternalServerError responds with an error message to the client with the error as a message
// and the file and line of the caller.
func (ctx *APIContext) InternalServerError(err error) {
log.ErrorWithSkip(1, "InternalServerError: %v", err)

var message string
if macaron.Env != macaron.PROD {
message = err.Error()
}

ctx.JSON(http.StatusInternalServerError, APIError{
Message: message,
URL: setting.API.SwaggerURL,
})
}

func genAPILinks(curURL *url.URL, total, pageSize, curPage int) []string {
page := NewPagination(total, pageSize, curPage, 0)
paginater := page.Paginater

@@ -212,6 +229,11 @@ func (ctx *APIContext) NotFound(objs ...interface{}) {
var message = "Not Found"
var errors []string
for _, obj := range objs {
// Ignore nil
if obj == nil {
continue
}

if err, ok := obj.(error); ok {
errors = append(errors, err.Error())
} else {
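Editor's note: with the new helper a handler can report unexpected failures without choosing a title or leaking details outside dev mode. A minimal sketch in the same context package; the handler and the loadWidget helper are illustrative only:

// GetWidget is an illustrative handler, not part of the diff.
func GetWidget(ctx *APIContext) {
	widget, err := loadWidget(ctx) // loadWidget is assumed for this sketch
	if err != nil {
		// Logs with the caller's file/line and hides the message in production.
		ctx.InternalServerError(err)
		return
	}
	ctx.JSON(http.StatusOK, widget)
}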
@@ -1,4 +1,5 @@
// Copyright 2014 The Gogs Authors. All rights reserved.
// Copyright 2020 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.

@@ -122,7 +123,7 @@ func (ctx *Context) RedirectToFirst(location ...string) {
}

u, err := url.Parse(loc)
if err != nil || (u.Scheme != "" && !strings.HasPrefix(strings.ToLower(loc), strings.ToLower(setting.AppURL))) {
if err != nil || ((u.Scheme != "" || u.Host != "") && !strings.HasPrefix(strings.ToLower(loc), strings.ToLower(setting.AppURL))) {
continue
}
@@ -91,12 +91,12 @@ func (r *Repository) CanUseTimetracker(issue *models.Issue, user *models.User) b
// 2. Is the user a contributor, admin, poster or assignee and do the repository policies require this?
isAssigned, _ := models.IsUserAssignedToIssue(issue, user)
return r.Repository.IsTimetrackerEnabled() && (!r.Repository.AllowOnlyContributorsToTrackTime() ||
r.Permission.CanWrite(models.UnitTypeIssues) || issue.IsPoster(user.ID) || isAssigned)
r.Permission.CanWriteIssuesOrPulls(issue.IsPull) || issue.IsPoster(user.ID) || isAssigned)
}

// CanCreateIssueDependencies returns whether or not a user can create dependencies.
func (r *Repository) CanCreateIssueDependencies(user *models.User) bool {
return r.Permission.CanWrite(models.UnitTypeIssues) && r.Repository.IsDependenciesEnabled()
func (r *Repository) CanCreateIssueDependencies(user *models.User, isPull bool) bool {
return r.Repository.IsDependenciesEnabled() && r.Permission.CanWriteIssuesOrPulls(isPull)
}

// GetCommitsCount returns cached commit count for current view

@@ -396,7 +396,7 @@ func RepoAssignment() macaron.Handler {
ctx.Data["RepoExternalIssuesLink"] = unit.ExternalTrackerConfig().ExternalTrackerURL
}

count, err := models.GetReleaseCountByRepoID(ctx.Repo.Repository.ID, models.FindReleasesOptions{
ctx.Data["NumReleases"], err = models.GetReleaseCountByRepoID(ctx.Repo.Repository.ID, models.FindReleasesOptions{
IncludeDrafts: false,
IncludeTags: true,
})

@@ -404,7 +404,6 @@ func RepoAssignment() macaron.Handler {
ctx.ServerError("GetReleaseCountByRepoID", err)
return
}
ctx.Repo.Repository.NumReleases = int(count)

ctx.Data["Title"] = owner.Name + "/" + repo.Name
ctx.Data["Repository"] = repo
@@ -30,8 +30,17 @@ func ToEmail(email *models.EmailAddress) *api.Email {
}

// ToBranch convert a git.Commit and git.Branch to an api.Branch
func ToBranch(repo *models.Repository, b *git.Branch, c *git.Commit, bp *models.ProtectedBranch, user *models.User) *api.Branch {
func ToBranch(repo *models.Repository, b *git.Branch, c *git.Commit, bp *models.ProtectedBranch, user *models.User) (*api.Branch, error) {
if bp == nil {
var hasPerm bool
var err error
if user != nil {
hasPerm, err = models.HasAccessUnit(user, repo, models.UnitTypeCode, models.AccessModeWrite)
if err != nil {
return nil, err
}
}

return &api.Branch{
Name: b.Name,
Commit: ToCommit(repo, c),

@@ -39,20 +48,25 @@ func ToBranch(repo *models.Repository, b *git.Branch, c *git.Commit, bp *models.
RequiredApprovals: 0,
EnableStatusCheck: false,
StatusCheckContexts: []string{},
UserCanPush: true,
UserCanMerge: true,
}
UserCanPush: hasPerm,
UserCanMerge: hasPerm,
}, nil
}
return &api.Branch{

branch := &api.Branch{
Name: b.Name,
Commit: ToCommit(repo, c),
Protected: true,
RequiredApprovals: bp.RequiredApprovals,
EnableStatusCheck: bp.EnableStatusCheck,
StatusCheckContexts: bp.StatusCheckContexts,
UserCanPush: bp.CanUserPush(user.ID),
UserCanMerge: bp.CanUserMerge(user.ID),
}

if user != nil {
branch.UserCanPush = bp.CanUserPush(user.ID)
branch.UserCanMerge = bp.IsUserMergeWhitelisted(user.ID)
}
return branch, nil
}

// ToTag convert a git.Tag to an api.Tag
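Editor's note: ToBranch can now fail while checking write access, so call sites must handle the extra error return. A minimal sketch of an adjusted call site; the wrapper function and its parameters are illustrative only:

// branchResponse is an illustrative wrapper, not part of the diff.
func branchResponse(ctx *context.APIContext, repo *models.Repository, b *git.Branch, c *git.Commit, bp *models.ProtectedBranch) {
	apiBranch, err := convert.ToBranch(repo, b, c, bp, ctx.User)
	if err != nil {
		ctx.InternalServerError(err)
		return
	}
	ctx.JSON(http.StatusOK, apiBranch)
}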
@@ -11,7 +11,7 @@ import (
"io"
"io/ioutil"

"gopkg.in/src-d/go-git.v4/plumbing"
"github.com/go-git/go-git/v5/plumbing"
)

// Blob represents a Git object.

@@ -20,7 +20,7 @@ import (
"strconv"
"strings"

"gopkg.in/src-d/go-git.v4/plumbing/object"
"github.com/go-git/go-git/v5/plumbing/object"
)

// Commit represents a git commit.

@@ -6,9 +6,9 @@ package git

import (
"github.com/emirpasic/gods/trees/binaryheap"
"gopkg.in/src-d/go-git.v4/plumbing"
"gopkg.in/src-d/go-git.v4/plumbing/object"
cgobject "gopkg.in/src-d/go-git.v4/plumbing/object/commitgraph"
"github.com/go-git/go-git/v5/plumbing"
"github.com/go-git/go-git/v5/plumbing/object"
cgobject "github.com/go-git/go-git/v5/plumbing/object/commitgraph"
)

// GetCommitsInfo gets information of all commits that are corresponding to these entries

@@ -7,7 +7,7 @@ package git
import (
"io/ioutil"

"gopkg.in/src-d/go-git.v4/plumbing/object"
"github.com/go-git/go-git/v5/plumbing/object"
)

// NotesRef is the git ref where Gitea will look for git-notes data.

@@ -9,8 +9,8 @@ import (
"fmt"
"strconv"

"gopkg.in/src-d/go-git.v4/plumbing/filemode"
"gopkg.in/src-d/go-git.v4/plumbing/object"
"github.com/go-git/go-git/v5/plumbing/filemode"
"github.com/go-git/go-git/v5/plumbing/object"
)

// ParseTreeEntries parses the output of a `git ls-tree` command.

@@ -7,9 +7,9 @@ package git
import (
"testing"

"github.com/go-git/go-git/v5/plumbing/filemode"
"github.com/go-git/go-git/v5/plumbing/object"
"github.com/stretchr/testify/assert"
"gopkg.in/src-d/go-git.v4/plumbing/filemode"
"gopkg.in/src-d/go-git.v4/plumbing/object"
)

func TestParseTreeEntries(t *testing.T) {

@@ -18,11 +18,11 @@ import (
"time"

gitealog "code.gitea.io/gitea/modules/log"
"github.com/go-git/go-billy/v5/osfs"
gogit "github.com/go-git/go-git/v5"
"github.com/go-git/go-git/v5/plumbing/cache"
"github.com/go-git/go-git/v5/storage/filesystem"
"github.com/unknwon/com"
"gopkg.in/src-d/go-billy.v4/osfs"
gogit "gopkg.in/src-d/go-git.v4"
"gopkg.in/src-d/go-git.v4/plumbing/cache"
"gopkg.in/src-d/go-git.v4/storage/filesystem"
)

// Repository represents a Git repository.

@@ -5,7 +5,7 @@
package git

import (
"gopkg.in/src-d/go-git.v4/plumbing"
"github.com/go-git/go-git/v5/plumbing"
)

func (repo *Repository) getBlob(id SHA1) (*Blob, error) {

@@ -9,7 +9,7 @@ import (
"fmt"
"strings"

"gopkg.in/src-d/go-git.v4/plumbing"
"github.com/go-git/go-git/v5/plumbing"
)

// BranchPrefix base dir of the branch information file store on git

@@ -12,15 +12,20 @@ import (
"strconv"
"strings"

"github.com/go-git/go-git/v5/plumbing"
"github.com/go-git/go-git/v5/plumbing/object"
"github.com/mcuadros/go-version"
"gopkg.in/src-d/go-git.v4/plumbing"
"gopkg.in/src-d/go-git.v4/plumbing/object"
)

// GetRefCommitID returns the last commit ID string of given reference (branch or tag).
func (repo *Repository) GetRefCommitID(name string) (string, error) {
ref, err := repo.gogitRepo.Reference(plumbing.ReferenceName(name), true)
if err != nil {
if err == plumbing.ErrReferenceNotFound {
return "", ErrNotExist{
ID: name,
}
}
return "", err
}

@@ -89,9 +94,15 @@ func (repo *Repository) getCommit(id SHA1) (*Commit, error) {
gogitCommit, err := repo.gogitRepo.CommitObject(id)
if err == plumbing.ErrObjectNotFound {
tagObject, err = repo.gogitRepo.TagObject(id)
if err == plumbing.ErrObjectNotFound {
return nil, ErrNotExist{
ID: id.String(),
}
}
if err == nil {
gogitCommit, err = repo.gogitRepo.CommitObject(tagObject.Target)
}
// if we get a plumbing.ErrObjectNotFound here then the repository is broken and it should be 500
}
if err != nil {
return nil, err

@@ -318,7 +329,7 @@ func (repo *Repository) CommitsBetween(last *Commit, before *Commit) (*list.List
var stdout []byte
var err error
if before == nil {
stdout, err = NewCommand("rev-list", before.ID.String()).RunInDirBytes(repo.Path)
stdout, err = NewCommand("rev-list", last.ID.String()).RunInDirBytes(repo.Path)
} else {
stdout, err = NewCommand("rev-list", before.ID.String()+"..."+last.ID.String()).RunInDirBytes(repo.Path)
}

@@ -11,8 +11,8 @@ import (

gitealog "code.gitea.io/gitea/modules/log"

"gopkg.in/src-d/go-git.v4/plumbing/format/commitgraph"
cgobject "gopkg.in/src-d/go-git.v4/plumbing/object/commitgraph"
"github.com/go-git/go-git/v5/plumbing/format/commitgraph"
cgobject "github.com/go-git/go-git/v5/plumbing/object/commitgraph"
)

// CommitNodeIndex returns the index for walking commit graph

@@ -7,8 +7,8 @@ package git
import (
"strings"

"gopkg.in/src-d/go-git.v4"
"gopkg.in/src-d/go-git.v4/plumbing"
"github.com/go-git/go-git/v5"
"github.com/go-git/go-git/v5/plumbing"
)

// GetRefs returns all references of the repository.

@@ -9,8 +9,8 @@ import (
"fmt"
"strings"

"github.com/go-git/go-git/v5/plumbing"
"github.com/mcuadros/go-version"
"gopkg.in/src-d/go-git.v4/plumbing"
)

// TagPrefix tags prefix path on the repository

@@ -10,7 +10,7 @@ import (
"fmt"
"strings"

"gopkg.in/src-d/go-git.v4/plumbing"
"github.com/go-git/go-git/v5/plumbing"
)

// EmptySHA defines empty git SHA
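Editor's note: the hunks above in the modules/git package are the mechanical half of the go-git migration; every gopkg.in/src-d/go-git.v4 import path moves to the github.com/go-git/go-git/v5 module path, for example:

import (
	// before: "gopkg.in/src-d/go-git.v4/plumbing"
	"github.com/go-git/go-git/v5/plumbing"
)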