forked from p85947160/gitea

Compare commits: main...lunny/disp (125 commits)
Author | SHA1 | Date |
---|---|---|
Lunny Xiao | 1abcd2fda1 | |
guillep2k | e4a876cee1 | |
guillep2k | abb534ba7a | |
James Lakin | 65dceb6a40 | |
6543 | db26f0aca9 | |
Andreas Shimokawa | 76878fd69b | |
zeripath | 3444fa2dd7 | |
zeripath | caa2aeaa52 | |
6543 | 11300ee582 | |
John Olheiser | c6b78c3d31 | |
zeripath | 4c40aa5be9 | |
6543 | 50f2e90b76 | |
zeripath | 5d11ccc9e1 | |
guillep2k | 93860af542 | |
James Lakin | 7bf5834f2c | |
John Olheiser | 1fbdd9335f | |
John Olheiser | e9061a537c | |
John Olheiser | ed664a9e1d | |
John Olheiser | 4cb18601ff | |
oscar.lofwenhamn | 3abb25166c | |
6543 | 9e6ad64d48 | |
Lunny Xiao | b51d7c459e | |
zeripath | d3b6f001fe | |
silverwind | e938f1d945 | |
guillep2k | 7284327a00 | |
guillep2k | 919f3f11e2 | |
guillep2k | 3cee15e6f9 | |
Lunny Xiao | 34e3644ada | |
guillep2k | 14bd120cdc | |
6543 | 3e40f8bebc | |
zeripath | df5f1d9dca | |
John Olheiser | 457ee1ab5a | |
zeripath | 4f64688902 | |
zeripath | 117dcf1c02 | |
mrsdizzie | 4529a262c0 | |
zeripath | ff24f81a05 | |
Antoine GIRARD | e3cb4f9d0e | |
zeripath | 9b7890f1cc | |
Antoine GIRARD | eb064dfda2 | |
zeripath | f9e66e5a46 | |
zeripath | ef89260cf1 | |
zeripath | 315d928626 | |
guillep2k | 5525452bdf | |
Lunny Xiao | 987cd277f6 | |
zeripath | 5cdfde2ebf | |
jaqra | 1cd6233cef | |
James Lakin | cb81e39f7a | |
Lauris BH | 8efd6b32e2 | |
6543 | c95d9603ea | |
John Olheiser | 9169b39458 | |
Lauris BH | 80eb50655a | |
6543 | b16c555541 | |
guillep2k | b5b44364e3 | |
guillep2k | 6af58022c8 | |
guillep2k | e48b460a0a | |
John Olheiser | 2cd2614eaa | |
6543 | 0129e76ef5 | |
6543 | 6896dad675 | |
zeripath | 1ed4323005 | |
Lunny Xiao | 049af0d3d0 | |
6543 | f5727d83dd | |
Lauris BH | 912ce27421 | |
6543 | b3549bb5ec | |
Lunny Xiao | 491cbeca67 | |
zeripath | 895d92ffe5 | |
zeripath | 4b11f967bd | |
Lunny Xiao | 1e73dd2446 | |
6543 | 315026c2c5 | |
Lunny Xiao | 16dfd9ffbe | |
Lunny Xiao | 16f7b43903 | |
techknowlogick | 043febdbc9 | |
guillep2k | 60f91d56f0 | |
guillep2k | 16fc15ae6a | |
techknowlogick | ef8f6d99f1 | |
John Olheiser | 4b688135f9 | |
John Olheiser | e24861a546 | |
6543 | 128cc34344 | |
Lauris BH | f82a805478 | |
Lunny Xiao | 0dced15c1a | |
John Olheiser | db9342c854 | |
zeripath | 79c1d48532 | |
zeripath | 05b9864086 | |
zeripath | ff508c9c9b | |
Lunny Xiao | f96c1a2c79 | |
6543 | ce756ee89f | |
zeripath | f2e9d4b851 | |
zeripath | e878d743f4 | |
6543 | 3fa14d89a2 | |
zeripath | bcb722daec | |
Lunny Xiao | 8add1dfacc | |
David Svantesson | aa6ed1b7c1 | |
6543 | 95cb921097 | |
6543 | 6730df9e8c | |
Moritz | b577500a54 | |
Lunny Xiao | fe46185407 | |
Lunny Xiao | 69a2a29c33 | |
Lunny Xiao | f766719895 | |
Lunny Xiao | e2ddc42377 | |
John Olheiser | 3521177a34 | |
Lunny Xiao | c8bb0ecf52 | |
zeripath | dbe6136348 | |
6543 | 6d1f7e90cf | |
David Svantesson | 42663a687c | |
6543 | 73c90c26d4 | |
John Olheiser | c579ad92b5 | |
6543 | 602c5da953 | |
6543 | 1980e59ac2 | |
Lunny Xiao | 28508792ba | |
silverwind | 3e23dad075 | |
zeripath | b13b9d3dbd | |
zeripath | 4072f28e60 | |
6543 | dbeef6bb02 | |
silverwind | fec35440db | |
zeripath | f8ea50cc7a | |
zeripath | 0e53a16cca | |
zeripath | 7eaba6ba8a | |
guillep2k | ff16099c6d | |
John Olheiser | a516a7ba0f | |
silverwind | 11bce6fd3d | |
techknowlogick | 3fb906dc02 | |
zeripath | 3a00a690c9 | |
John Olheiser | a2b7cc1bb1 | |
John Olheiser | 04a77b1f42 | |
John Olheiser | f523372d07 | |
6543 | e39c238ef4 |
@@ -1,44 +1,57 @@
+# The full repository name
 repo: go-gitea/gitea
+
+# Service type (gitea or github)
+service: github
+
+# Base URL for Gitea instance if using gitea service type (optional)
+# Default: https://gitea.com
+base-url:
+
+# Changelog groups and which labeled PRs to add to each group
 groups:
   -
     name: BREAKING
     labels:
       - kind/breaking
   -
     name: FEATURE
     labels:
       - kind/feature
+  -
+    name: SECURITY
+    labels:
+      - kind/security
   -
     name: BUGFIXES
     labels:
       - kind/bug
   -
     name: ENHANCEMENT
     labels:
       - kind/enhancement
       - kind/refactor
       - kind/ui
   -
-    name: SECURITY
-    labels:
-      - kind/security
-  -
     name: TESTING
     labels:
       - kind/testing
   -
     name: TRANSLATION
     labels:
       - kind/translation
   -
     name: BUILD
     labels:
       - kind/build
       - kind/lint
   -
     name: DOCS
     labels:
       - kind/docs
   -
     name: MISC
     default: true
+
+# regex indicating which labels to skip for the changelog
+skip-labels: skip-changelog|backport\/.+
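The grouping rules above map PR labels to changelog sections, with the MISC group acting as the default bucket and skip-labels filtering out skip-changelog and backport PRs. A rough Go sketch of that bucketing logic, assuming a simplified in-memory PR type (the type and function names here are illustrative, not the actual changelog tool's API):

```go
package main

import (
	"fmt"
	"regexp"
)

// group mirrors one entry under "groups:" in the config above.
type group struct {
	Name    string
	Labels  []string
	Default bool
}

// pr is a simplified stand-in for a merged pull request.
type pr struct {
	Title  string
	Labels []string
}

// bucket assigns a PR to the first group whose label list matches,
// skipping PRs that carry a label matching skipLabels.
func bucket(p pr, groups []group, skipLabels *regexp.Regexp) (string, bool) {
	for _, l := range p.Labels {
		if skipLabels.MatchString(l) {
			return "", false // e.g. skip-changelog or backport/* labels
		}
	}
	var def string
	for _, g := range groups {
		if g.Default {
			def = g.Name
		}
		for _, want := range g.Labels {
			for _, l := range p.Labels {
				if l == want {
					return g.Name, true
				}
			}
		}
	}
	return def, true // fall back to the MISC-style default group
}

func main() {
	groups := []group{
		{Name: "BREAKING", Labels: []string{"kind/breaking"}},
		{Name: "SECURITY", Labels: []string{"kind/security"}},
		{Name: "MISC", Default: true},
	}
	skip := regexp.MustCompile(`skip-changelog|backport\/.+`)
	name, ok := bucket(pr{Title: "Prevent redirect to Host", Labels: []string{"kind/security"}}, groups, skip)
	fmt.Println(name, ok) // SECURITY true
}
```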
.drone.yml (409 lines changed)

@@ -1,6 +1,61 @@
 ---
 kind: pipeline
-name: testing
+name: compliance
+
+platform:
+  os: linux
+  arch: arm64
+
+workspace:
+  base: /go
+  path: src/code.gitea.io/gitea
+
+steps:
+- name: pre-build
+  pull: always
+  image: node:10 # this step is kept at the lowest version of node that we support
+  commands:
+  - make css
+  - make js
+
+- name: build-without-gcc
+  pull: always
+  image: golang:1.11 # this step is kept as the lowest version of golang that we support
+  environment:
+    GO111MODULE: on
+    GOPROXY: off
+  commands:
+  - go build -mod=vendor -o gitea_no_gcc # test if build succeeds without the sqlite tag
+
+- name: build-linux-386
+  pull: always
+  image: golang:1.13
+  environment:
+    GO111MODULE: on
+    GOPROXY: off
+    GOOS: linux
+    GOARCH: 386
+  commands:
+  - go build -mod=vendor -o gitea_linux_386 # test if compatible with 32 bit
+
+- name: check
+  pull: always
+  image: golang:1.13
+  commands:
+  - make clean
+  - make golangci-lint
+  - make revive
+  - make swagger-check
+  - make swagger-validate
+  - make test-vendor
+  environment:
+    GOPROXY: https://goproxy.cn # proxy.golang.org is blocked in China, this proxy is not
+    GOSUMDB: sum.golang.org
+    TAGS: bindata sqlite sqlite_unlock_notify
+
+---
+kind: pipeline
+name: testing-amd64
 
 platform:
   os: linux
@@ -25,15 +80,9 @@ services:
     MYSQL_ALLOW_EMPTY_PASSWORD: yes
     MYSQL_DATABASE: testgitea
 
-- name: pgsql
-  pull: default
-  image: postgres:9.5
-  environment:
-    POSTGRES_DB: test
-
 - name: mssql
   pull: default
-  image: microsoft/mssql-server-linux:latest
+  image: mcr.microsoft.com/mssql/server:latest
   environment:
     ACCEPT_EULA: Y
     MSSQL_PID: Standard
@@ -54,52 +103,23 @@ steps:
       exclude:
       - pull_request
 
-- name: pre-build
-  pull: always
-  image: node:10 # this step is kept at the lowest version of node that we support
-  commands:
-  - make css
-  - make js
-
-- name: build-without-gcc
-  pull: always
-  image: golang:1.11 # this step is kept as the lowest version of golang that we support
-  environment:
-    GO111MODULE: on
-    GOPROXY: off
-  commands:
-  - curl -sL https://deb.nodesource.com/setup_12.x | bash - && apt -y install nodejs
-  - go build -mod=vendor -o gitea_no_gcc # test if build succeeds without the sqlite tag
-
-- name: build-linux-386
-  pull: always
-  image: golang:1.13
-  environment:
-    GO111MODULE: on
-    GOPROXY: off
-    GOOS: linux
-    GOARCH: 386
-  commands:
-  - curl -sL https://deb.nodesource.com/setup_12.x | bash - && apt -y install nodejs
-  - go build -mod=vendor -o gitea_linux_386 # test if compatible with 32 bit
-
 - name: build
   pull: always
   image: golang:1.13
   commands:
   - curl -sL https://deb.nodesource.com/setup_12.x | bash - && apt -y install nodejs
-  - make clean
-  - make golangci-lint
-  - make revive
-  - make swagger-check
-  - make swagger-validate
-  - make test-vendor
   - make build
   environment:
     GOPROXY: https://goproxy.cn # proxy.golang.org is blocked in China, this proxy is not
     GOSUMDB: sum.golang.org
     TAGS: bindata sqlite sqlite_unlock_notify
 
+- name: tag-pre-condition
+  pull: always
+  image: alpine/git
+  commands:
+  - git update-ref refs/heads/tag_test ${DRONE_COMMIT_SHA}
+
 - name: unit-test
   pull: always
   image: golang:1.13
@@ -108,70 +128,8 @@ steps:
   environment:
     GOPROXY: off
     TAGS: bindata sqlite sqlite_unlock_notify
+    GITHUB_READ_TOKEN:
+      from_secret: github_read_token
-  depends_on:
-  - build
-  when:
-    branch:
-    - master
-    event:
-    - push
-    - pull_request
-
-- name: release-test
-  pull: always
-  image: golang:1.13
-  commands:
-  - make test
-  environment:
-    GOPROXY: off
-    TAGS: bindata sqlite sqlite_unlock_notify
-  depends_on:
-  - build
-  when:
-    branch:
-    - "release/*"
-    event:
-    - push
-    - pull_request
-
-- name: tag-pre-condition
-  pull: always
-  image: alpine/git
-  commands:
-  - git update-ref refs/heads/tag_test ${DRONE_COMMIT_SHA}
-  depends_on:
-  - build
-  when:
-    event:
-    - tag
-
-- name: tag-test
-  pull: always
-  image: golang:1.13
-  commands:
-  - make test
-  environment:
-    GOPROXY: off
-    TAGS: bindata
-  depends_on:
-  - tag-pre-condition
-  when:
-    event:
-    - tag
-
-- name: test-sqlite
-  pull: always
-  image: golang:1.13
-  commands:
-  - "curl -s https://packagecloud.io/install/repositories/github/git-lfs/script.deb.sh | bash"
-  - apt-get install -y git-lfs
-  - timeout -s ABRT 20m make test-sqlite-migration
-  - timeout -s ABRT 20m make test-sqlite
-  environment:
-    GOPROXY: off
-    TAGS: bindata
-  depends_on:
-  - build
 
 - name: test-mysql
   pull: always
@@ -187,30 +145,6 @@ steps:
     TEST_LDAP: 1
   depends_on:
   - build
-  when:
-    branch:
-    - master
-    event:
-    - push
-    - pull_request
-
-- name: tag-test-mysql
-  pull: always
-  image: golang:1.13
-  commands:
-  - "curl -s https://packagecloud.io/install/repositories/github/git-lfs/script.deb.sh | bash"
-  - apt-get install -y git-lfs
-  - timeout -s ABRT 20m make test-mysql-migration
-  - timeout -s ABRT 20m make test-mysql
-  environment:
-    GOPROXY: off
-    TAGS: bindata
-    TEST_LDAP: 1
-  depends_on:
-  - build
-  when:
-    event:
-    - tag
 
 - name: test-mysql8
   pull: always
@@ -227,21 +161,6 @@ steps:
   depends_on:
   - build
 
-- name: test-pgsql
-  pull: always
-  image: golang:1.13
-  commands:
-  - "curl -s https://packagecloud.io/install/repositories/github/git-lfs/script.deb.sh | bash"
-  - apt-get install -y git-lfs
-  - timeout -s ABRT 20m make test-pgsql-migration
-  - timeout -s ABRT 20m make test-pgsql
-  environment:
-    GOPROXY: off
-    TAGS: bindata
-    TEST_LDAP: 1
-  depends_on:
-  - build
-
 - name: test-mssql
   pull: always
   image: golang:1.13
@@ -293,13 +212,90 @@ steps:
     - push
     - pull_request
 
+
+
+---
+kind: pipeline
+name: testing-arm64
+
+platform:
+  os: linux
+  arch: arm64
+
+workspace:
+  base: /go
+  path: src/code.gitea.io/gitea
+
+services:
+- name: pgsql
+  pull: default
+  image: postgres:9.5
+  environment:
+    POSTGRES_DB: test
+    POSTGRES_PASSWORD: postgres
+
+- name: ldap
+  pull: default
+  image: gitea/test-openldap:latest
+
+steps:
+- name: fetch-tags
+  pull: default
+  image: docker:git
+  commands:
+  - git fetch --tags --force
+  when:
+    event:
+      exclude:
+      - pull_request
+
+- name: build
+  pull: always
+  image: golang:1.13
+  commands:
+  - curl -sL https://deb.nodesource.com/setup_12.x | bash - && apt -y install nodejs
+  - make build
+  environment:
+    GOPROXY: https://goproxy.cn # proxy.golang.org is blocked in China, this proxy is not
+    GOSUMDB: sum.golang.org
+    TAGS: bindata sqlite sqlite_unlock_notify
+
+- name: test-sqlite
+  pull: always
+  image: golang:1.13
+  commands:
+  - "curl -s https://packagecloud.io/install/repositories/github/git-lfs/script.deb.sh | bash"
+  - apt-get install -y git-lfs
+  - timeout -s ABRT 20m make test-sqlite-migration
+  - timeout -s ABRT 20m make test-sqlite
+  environment:
+    GOPROXY: off
+    TAGS: bindata
+  depends_on:
+  - build
+
+- name: test-pgsql
+  pull: always
+  image: golang:1.13
+  commands:
+  - "curl -s https://packagecloud.io/install/repositories/github/git-lfs/script.deb.sh | bash"
+  - apt-get install -y git-lfs
+  - timeout -s ABRT 20m make test-pgsql-migration
+  - timeout -s ABRT 20m make test-pgsql
+  environment:
+    GOPROXY: off
+    TAGS: bindata
+    TEST_LDAP: 1
+  depends_on:
+  - build
+
 ---
 kind: pipeline
 name: translations
 
 platform:
   os: linux
-  arch: amd64
+  arch: arm64
 
 workspace:
   base: /go
@@ -378,7 +374,8 @@ trigger:
   - push
 
 depends_on:
-- testing
+- testing-amd64
+- testing-arm64
 - translations
 
 steps:
@@ -476,7 +473,8 @@ trigger:
   - tag
 
 depends_on:
-- testing
+- testing-arm64
+- testing-amd64
 
 steps:
 - name: fetch-tags
@@ -545,17 +543,14 @@ name: docs
 
 platform:
   os: linux
-  arch: amd64
+  arch: arm64
 
-workspace:
-  base: /go
-  path: src/code.gitea.io/gitea
-
 steps:
 - name: build-docs
   pull: always
-  image: webhippie/hugo:latest
+  image: plugins/hugo:latest
   commands:
+  - apk add --no-cache make bash curl
   - cd docs
   - make trans-copy
   - make clean
@@ -563,7 +558,7 @@ steps:
 
 - name: publish-docs
   pull: always
-  image: lucap/drone-netlify:latest
+  image: techknowlogick/drone-netlify:latest
   settings:
     path: docs/public/
     site_id: d2260bae-7861-4c02-8646-8f6440b12672
@@ -578,7 +573,7 @@ steps:
 
 ---
 kind: pipeline
-name: docker-linux-amd64
+name: docker-linux-amd64-release
 
 platform:
   os: linux
@@ -589,13 +584,13 @@ workspace:
   path: src/code.gitea.io/gitea
 
 depends_on:
-- testing
+- testing-amd64
+- testing-arm64
 
 trigger:
   ref:
   - refs/heads/master
   - "refs/tags/**"
-  - "refs/pull/**"
 
 steps:
 - name: fetch-tags
@@ -603,23 +598,6 @@ steps:
   image: docker:git
   commands:
   - git fetch --tags --force
-  when:
-    event:
-      exclude:
-      - pull_request
-
-- name: dryrun
-  pull: always
-  image: plugins/docker:linux-amd64
-  settings:
-    dry_run: true
-    repo: gitea/gitea
-    tags: linux-amd64
-    build_args:
-    - GOPROXY=off
-  when:
-    event:
-    - pull_request
 
 - name: publish
   pull: always
@@ -641,7 +619,7 @@ steps:
 
 ---
 kind: pipeline
-name: docker-linux-arm64
+name: docker-linux-arm64-dry-run
 
 platform:
   os: linux
@@ -652,25 +630,13 @@ workspace:
   path: src/code.gitea.io/gitea
 
 depends_on:
-- testing
+- compliance
 
 trigger:
   ref:
-  - refs/heads/master
-  - "refs/tags/**"
   - "refs/pull/**"
 
 steps:
-- name: fetch-tags
-  pull: default
-  image: docker:git
-  commands:
-  - git fetch --tags --force
-  when:
-    event:
-      exclude:
-      - pull_request
-
 - name: dryrun
   pull: always
   image: plugins/docker:linux-arm64
@@ -684,6 +650,33 @@ steps:
     event:
     - pull_request
 
+---
+kind: pipeline
+name: docker-linux-arm64-release
+
+platform:
+  os: linux
+  arch: arm64
+
+workspace:
+  base: /go
+  path: src/code.gitea.io/gitea
+
+depends_on:
+- testing-amd64
+- testing-arm64
+
+trigger:
+  ref:
+  - refs/heads/master
+  - "refs/tags/**"
+steps:
+- name: fetch-tags
+  pull: default
+  image: docker:git
+  commands:
+  - git fetch --tags --force
+
 - name: publish
   pull: always
   image: plugins/docker:linux-arm64
@@ -729,45 +722,49 @@ trigger:
   - "refs/tags/**"
 
 depends_on:
-- docker-linux-amd64
-- docker-linux-arm64
+- docker-linux-amd64-release
+- docker-linux-arm64-release
 
 ---
 kind: pipeline
-name: notify
+name: notifications
 
 platform:
   os: linux
-  arch: amd64
+  arch: arm64
 
-workspace:
-  base: /go
-  path: src/code.gitea.io/gitea
-
 clone:
   disable: true
 
-when:
+trigger:
+  branch:
+  - master
+  - "release/*"
+  event:
+  - push
+  - tag
   status:
   - success
   - failure
 
 depends_on:
-- testing
+- testing-amd64
+- testing-arm64
 - translations
 - release-version
 - release-master
-- docker-linux-amd64
-- docker-linux-arm64
+- docker-linux-amd64-release
+- docker-linux-arm64-release
 - docker-manifest
 - docs
 
 steps:
 - name: discord
   pull: always
-  image: appleboy/drone-discord:1.0.0
+  image: appleboy/drone-discord:1.2.4
-  environment:
-    DISCORD_WEBHOOK_ID:
+  settings:
+    message: "{{#success build.status}} ✅ Build #{{build.number}} of `{{repo.name}}` succeeded.\n\n📝 Commit by {{commit.author}} on `{{commit.branch}}`:\n``` {{commit.message}} ```\n\n🌐 {{ build.link }} {{else}} ❌ Build #{{build.number}} of `{{repo.name}}` failed.\n\n📝 Commit by {{commit.author}} on `{{commit.branch}}`:\n``` {{commit.message}} ```\n\n🌐 {{ build.link }} {{/success}}\n"
+    webhook_id:
       from_secret: discord_webhook_id
-    DISCORD_WEBHOOK_TOKEN:
+    webhook_token:
       from_secret: discord_webhook_token
@@ -69,6 +69,7 @@ coverage.all
 /yarn.lock
 /public/js
 /public/css
+/VERSION
 
 # Snapcraft
 snap/.snapcraft/
CHANGELOG.md (134 lines changed)

@@ -4,13 +4,36 @@ This changelog goes through all the changes that have been made in each release
 without substantial changes to our git log; to see the highlights of what has
 been added to each release, please refer to the [blog](https://blog.gitea.io).
 
-## [1.11.0-RC1](https://github.com/go-gitea/gitea/releases/tag/v1.11.0-rc1) - 2020-01-07
+## [1.11.1](https://github.com/go-gitea/gitea/releases/tag/v1.11.1) - 2020-02-15
+
+* BUGFIXES
+  * Repo name added to automatically generated commit message when merging (#9997) (#10285)
+  * Fix Workerpool deadlock (#10283) (#10284)
+  * Divide GetIssueStats query in smaller chunks (#10176) (#10282)
+  * Fix reply on code review (#10257)
+  * Stop hanging issue indexer initialisation from preventing shutdown (#10243) (#10249)
+  * Fix filter label emoji width (#10241) (#10244)
+  * Fix issue sidebar menus having an infinite height (#10239) (#10240)
+  * Fix commit between two commits calculation if there is only last commit (#10225) (#10226)
+  * Only check for conflicts/merging if the PR has not been merged in the interim (#10132) (#10206)
+  * Blacklist manifest.json & milestones user (#10292) (#10293)
+
+## [1.11.0](https://github.com/go-gitea/gitea/releases/tag/v1.11.0) - 2020-02-10
 * BREAKING
+  * Fix followers and following tabs in profile (#10202) (#10203)
+  * Make CertFile and KeyFile relative to CustomPath (#9868) (#9874)
   * Remove unused endpoints (#9538)
   * Prefix all user-generated IDs in markup (#9477)
   * Enforce Gitea environment for pushes (#8982)
-  * Hide some user information via API if user have no enough permission (#8655)
+  * Hide some user information via API if user have not enough permissions (#8655)
   * Move startpage/homepage translation to crowdin (#8596)
+* SECURITY
+  * Never allow an empty password to validate (#9682) (#9683)
+  * Prevent redirect to Host (#9678) (#9679)
+  * Swagger hide search field (#9554)
+  * Add "search" to reserved usernames (#9063)
+  * Switch to fomantic-ui (#9374)
+  * Only serve attachments when linked to issue/release and if accessible by user (#9340)
 * FEATURES
   * Webhooks should only show sender if it makes sense (#9601)
   * Provide Default messages for merges (#9393)
@@ -44,6 +67,68 @@ been added to each release, please refer to the [blog](https://blog.gitea.io).
   * Sign merges, CRUD, Wiki and Repository initialisation with gpg key (#7631)
   * Add basic repository lfs management (#7199)
 * BUGFIXES
+  * Fix code-expansion arc-green theme bug (#10180) (#10185)
+  * Prevent double wait-group decrement (#10170) (#10175)
+  * Allow emoji on review head comments (#10159) (#10174)
+  * Fix issue/pull link (#10158) (#10173)
+  * Fix push-create SSH bugs (#10145) (#10151)
+  * Prevent DeleteUser API abuse (#10125) (#10128)
+  * Fix issues/pulls dashboard paging error (#10114) (#10115)
+  * Add button to revert SimpleMDE to plain textarea (#10099) (#10102)
+  * Fix branch page pull request title and link error (#10092) (#10097)
+  * Fix PR API: Only try to get HeadBranch if HeadRepo exist (#10029) (#10088)
+  * Update topics repo count when deleting repository (#10051) (#10081)
+  * Show pull icon on pull requests (#10061) (#10062)
+  * Fix milestone API state parameter unhandled (#10049) (#10052)
+  * Move to using a temporary repo for pushing new PRs (#10009) (#10042)
+  * Fix wiki raw view on sub path (#10002) (#10040)
+  * Ensure that feeds are appropriately restricted (#10018) (#10019)
+  * Sanitize credentials in mirror form (#9975) (#9991)
+  * Close related pull requests when deleting head repository or head branch (#9927) (#9974)
+  * Switch to use -f instead of -F for sendmail (#9961) (#9970)
+  * Fix file rename/copy not supported by indexer (#9965) (#9967)
+  * Fix repo indexer not updating upon push (#9957) (#9963)
+  * Don't convert ellipsis in markdown (#9905) (#9937)
+  * Fixed repo link in generated comment for cross repository dependency (#9863) (#9935)
+  * Check if diff actually contains sections when rendering (#9926) (#9933)
+  * Fix wrong hint when status checking is running on pull request view (#9886) (#9928)
+  * Fix RocketChat (#9908) (#9921)
+  * Do not try to recreate ldap user if they are already created (#9900) (#9919)
+  * Create terminated channel in queue_redis (#9910) (#9911)
+  * Prevent empty LDAP search result from deactivating all users (#9879) (#9896)
+  * Fix wrong permissions check when issues/prs shared operations (#9885) (#9889)
+  * Check user != nil before checking values (#9881) (#9883)
+  * Allow hyphen in language name (#9873) (#9880)
+  * Ensure that 2fa is checked on reset-password (#9857) (#9876)
+  * Fix issues/pulls dependencies problems (#9842) (#9864)
+  * Fix markdown anchor links (#9673) (#9840)
+  * Allow assignee on Pull Creation when Issue Unit is deactivated (#9836) (#9837)
+  * Fix download file wrong content-type (#9825) (#9834)
+  * Fix wrong poster identity on a migrated pull request when submit review (#9827) (#9830)
+  * Fix database dump when log directory is missing (#9818) (#9819)
+  * Fix compare (#9808) (#9814)
+  * Fix push-to-create (#9772) (#9797)
+  * Fix missing msteam webhook on organization (#9781) (#9794)
+  * Fix missing unlock in uniquequeue (#9790) (#9791)
+  * Fix add team on collaborator page when same name as organization (#9778)
+  * DeleteRepoFile incorrectly handles Delete to new branch (#9769) (#9775)
+  * Fix milestones page (#9771)
+  * Fix SimpleMDE quote reply (#9757) (#9768)
+  * Fix missing updated time on migrated issues and comments (#9744) (#9764)
+  * Move Errored PRs out of StatusChecking (#9675) (#9726)
+  * Make hook status printing configurable with delay (#9641) (#9725)
+  * Fix /repos/issues/search (#9698) (#9724)
+  * Silence fomantic error regarding tabs (#9713) (#9718)
+  * Remove unused lock (#9709) (#9710)
+  * Remove q.lock.Unlock() in setInternal to prevent panic (#9705) (#9706)
+  * Load milestone in API PR list (#9671) (#9700)
+  * Don't attempt to close issue if already closed (#9696) (#9699)
+  * Remove google font call (#9668) (#9681)
+  * Eliminate horizontal scroll caused by footer (#9674)
+  * Fix nil reference in repo generation (#9660) (#9666)
+  * Add HTML URL to API Issues (#9654) (#9661)
+  * Add PR review webhook to Telegram (#9653) (#9655)
+  * Use filepath.IsAbs instead of path.IsAbs (#9651) (#9652)
   * Disable remove button on repository teams when have access to all (#9640)
   * Clean up old references on branch delete (#9614)
   * Hide public repos owned by private orgs (#9609)
@@ -175,6 +260,7 @@ been added to each release, please refer to the [blog](https://blog.gitea.io).
   * Fix migrate mirror 500 bug (#8526)
   * Fix password complexity regex for special characters (on master) (#8525)
 * ENHANCEMENTS
+  * Explicitly refer to PR in squash-merge commit message in case of external tracker (#9844) (#9855)
   * Add a /user/login landing page option (#9622)
   * Some more e-mail notification fixes (#9596)
   * Add branch protection option to block merge on requested changes. (#9592)
@@ -291,12 +377,6 @@ been added to each release, please refer to the [blog](https://blog.gitea.io).
   * wiki - add 'write' 'preview' buttons to wiki edit like in issues (#7241)
   * Change target branch for pull request (#6488)
   * Display PR commits and diffs using base repo rather than forked (#3648)
-* SECURITY
-  * Swagger hide search field (#9554)
-  * Add "search" to reserved usernames (#9063)
-  * Switch to fomantic-ui (#9374)
-  * Only serve attachments when linked to issue/release and if accessible by user (#9340)
-  * Hide credentials when submitting migration through API (#9102)
 * TESTING
   * Add debug option to serv to help debug problems (#9492)
   * Fix the intermittent TestGPGGit failures (#9360)
@@ -310,10 +390,12 @@ been added to each release, please refer to the [blog](https://blog.gitea.io).
   * Update Github Migration Tests (#8893) (#8938)
   * Update heatmap fixtures to restore tests (#8615)
 * TRANSLATION
+  * Fix Korean locales (#9761) (#9780)
   * Fix placeholders in the error message (#9060)
   * Fix spelling of admin.users.max_repo_creation (#8934)
   * Improve german translation of homepage (#8549)
 * BUILD
+  * Fix webpack polyfills (#9735) (#9738)
   * Update gitea.com/macaron to 1.4.0 (#9608)
   * Upgrade lato fonts to v16. (#9498)
   * Update alpine to 3.11 (#9440)
@@ -344,6 +426,7 @@ been added to each release, please refer to the [blog](https://blog.gitea.io).
   * Update the provided gitea.service to mention socket activation (#8531)
   * Doc added how to setup email (#8520)
 * MISC
+  * Backport Locales [2020-01-14] (#9773)
   * Add translatable Powered by Gitea text in footer (#9600)
   * Add contrib/environment-to-ini (#9519)
   * Remove unnecessary loading of settings in update hook (#9496)
@@ -384,6 +467,37 @@ been added to each release, please refer to the [blog](https://blog.gitea.io).
   * Update CodeMirror to version 5.49.0 (#8381)
   * Wiki editor: enable side-by-side button (#7242)
 
+## [1.10.4](https://github.com/go-gitea/gitea/releases/tag/v1.10.4) - 2020-02-16
+
+* FEATURE
+  * Prevent empty LDAP search from deactivating all users (#9879) (#9890)
+* BUGFIXES
+  * Fix reply on code review (#10261) (#10227)
+  * Fix branch page pull request title and link error (#10092) (#10098)
+  * Fix milestone API state parameter unhandled (#10049) (#10053)
+  * Fix wiki raw view on sub path (#10002) (#10041)
+  * Fix RocketChat Webhook (#9908) (#9921) (#9925)
+  * Fix bug about wrong dependencies permissions check and other wrong permissions check (#9884) (Partial backport #9842)
+  * Ensure that 2fa is checked on reset-password (#9857) (#9877)
+
+## [1.10.3](https://github.com/go-gitea/gitea/releases/tag/v1.10.3) - 2020-01-17
+* SECURITY
+  * Hide credentials when submitting migration (#9102) (#9704)
+  * Never allow an empty password to validate (#9682) (#9684)
+  * Prevent redirect to Host (#9678) (#9680)
+  * Hide public repos owned by private orgs (#9609) (#9616)
+* BUGFIXES
+  * Allow assignee on Pull Creation when Issue Unit is deactivated (#9836) (#9838)
+  * Fix download file wrong content-type (#9825) (#9835)
+  * Fix wrong identify poster on a migrated pull request when submit review (#9827) (#9831)
+  * Fix dump non-exist log directory (#9818) (#9820)
+  * Fix compare (#9808) (#9815)
+  * Fix missing msteam webhook on organization (#9781) (#9795)
+  * Fix add team on collaborator page when same name as organization (#9783)
+  * Fix cache problem on dashboard (#9358) (#9703)
+  * Send tag create and push webhook when release created on UI (#8671) (#9702)
+  * Branches not at ref commit ID should not be listed as Merged (#9614) (#9639)
+
 ## [1.10.2](https://github.com/go-gitea/gitea/releases/tag/v1.10.2) - 2020-01-02
 * BUGFIXES
   * Allow only specific Columns to be updated on Issue via API (#9539) (#9580)
@@ -1483,13 +1597,13 @@ been added to each release, please refer to the [blog](https://blog.gitea.io).
 * BUGFIXES
   * Allow resend of confirmation email when logged in (#6482) (#6487)
 
 ## [1.7.5](https://github.com/go-gitea/gitea/releases/tag/v1.7.5) - 2019-03-27
 * BUGFIXES
   * Fix unitTypeCode not being used in accessLevelUnit (#6419) (#6423)
   * Fix bug where manifest.json was being requested without cookies and continuously creating new sessions (#6372) (#6383)
   * Fix ParsePatch function to work with quoted diff --git strings (#6323) (#6332)
 
 ## [1.7.4](https://github.com/go-gitea/gitea/releases/tag/v1.7.4) - 2019-03-12
 * SECURITY
   * Fix potential XSS vulnerability in repository description. (#6306) (#6308)
 * BUGFIXES
Makefile
59
Makefile
|
@ -29,6 +29,8 @@ EXTRA_GOFLAGS ?=
|
||||||
|
|
||||||
MAKE_VERSION := $(shell $(MAKE) -v | head -n 1)
|
MAKE_VERSION := $(shell $(MAKE) -v | head -n 1)
|
||||||
|
|
||||||
|
STORED_VERSION_FILE := VERSION
|
||||||
|
|
||||||
ifneq ($(DRONE_TAG),)
|
ifneq ($(DRONE_TAG),)
|
||||||
VERSION ?= $(subst v,,$(DRONE_TAG))
|
VERSION ?= $(subst v,,$(DRONE_TAG))
|
||||||
GITEA_VERSION ?= $(VERSION)
|
GITEA_VERSION ?= $(VERSION)
|
||||||
|
@ -38,7 +40,13 @@ else
|
||||||
else
|
else
|
||||||
VERSION ?= master
|
VERSION ?= master
|
||||||
endif
|
endif
|
||||||
GITEA_VERSION ?= $(shell git describe --tags --always | sed 's/-/+/' | sed 's/^v//')
|
|
||||||
|
STORED_VERSION=$(shell cat $(STORED_VERSION_FILE) 2>/dev/null)
|
||||||
|
ifneq ($(STORED_VERSION),)
|
||||||
|
GITEA_VERSION ?= $(STORED_VERSION)
|
||||||
|
else
|
||||||
|
GITEA_VERSION ?= $(shell git describe --tags --always | sed 's/-/+/' | sed 's/^v//')
|
||||||
|
endif
|
||||||
endif
|
endif
|
||||||
|
|
||||||
LDFLAGS := $(LDFLAGS) -X "main.MakeVersion=$(MAKE_VERSION)" -X "main.Version=$(GITEA_VERSION)" -X "main.Tags=$(TAGS)"
|
LDFLAGS := $(LDFLAGS) -X "main.MakeVersion=$(MAKE_VERSION)" -X "main.Version=$(GITEA_VERSION)" -X "main.Tags=$(TAGS)"
|
||||||
|
@ -96,13 +104,15 @@ include docker/Makefile
|
||||||
help:
|
help:
|
||||||
@echo "Make Routines:"
|
@echo "Make Routines:"
|
||||||
@echo " - \"\" equivalent to \"build\""
|
@echo " - \"\" equivalent to \"build\""
|
||||||
@echo " - build creates the entire project"
|
@echo " - build build everything"
|
||||||
@echo " - clean delete integration files and build files but not css and js files"
|
@echo " - frontend build frontend files"
|
||||||
@echo " - clean-all delete all generated files (integration test, build, css and js files)"
|
@echo " - backend build backend files"
|
||||||
|
@echo " - clean delete backend and integration files"
|
||||||
|
@echo " - clean-all delete backend, frontend and integration files"
|
||||||
@echo " - css rebuild only css files"
|
@echo " - css rebuild only css files"
|
||||||
@echo " - js rebuild only js files"
|
@echo " - js rebuild only js files"
|
||||||
@echo " - generate run \"make css js\" and \"go generate\""
|
@echo " - generate run \"go generate\""
|
||||||
@echo " - fmt format the code"
|
@echo " - fmt format the Go code"
|
||||||
@echo " - generate-swagger generate the swagger spec from code comments"
|
@echo " - generate-swagger generate the swagger spec from code comments"
|
||||||
@echo " - swagger-validate check if the swagger spec is valide"
|
@echo " - swagger-validate check if the swagger spec is valide"
|
||||||
@echo " - revive run code linter revive"
|
@echo " - revive run code linter revive"
|
||||||
|
@ -113,12 +123,19 @@ help:
|
||||||
|
|
||||||
.PHONY: go-check
|
.PHONY: go-check
|
||||||
go-check:
|
go-check:
|
||||||
$(eval GO_VERSION := $(shell printf "%03d%03d%03d" $(shell go version | grep -Eo '[0-9]+\.?[0-9]+?\.?[0-9]?\s' | tr '.' ' ');))
|
$(eval GO_VERSION := $(shell printf "%03d%03d%03d" $(shell go version | grep -Eo '[0-9]+\.?[0-9]+?\.?[0-9]?[[:space:]]' | tr '.' ' ');))
|
||||||
@if [ "$(GO_VERSION)" -lt "001011000" ]; then \
|
@if [ "$(GO_VERSION)" -lt "001011000" ]; then \
|
||||||
echo "Gitea requires Go 1.11.0 or greater to build. You can get it at https://golang.org/dl/"; \
|
echo "Gitea requires Go 1.11.0 or greater to build. You can get it at https://golang.org/dl/"; \
|
||||||
exit 1; \
|
exit 1; \
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
.PHONY: git-check
|
||||||
|
git-check:
|
||||||
|
@if git lfs >/dev/null 2>&1 ; then : ; else \
|
||||||
|
echo "Gitea requires git with lfs support to run tests." ; \
|
||||||
|
exit 1; \
|
||||||
|
fi
|
||||||
|
|
||||||
.PHONY: node-check
|
.PHONY: node-check
|
||||||
node-check:
|
node-check:
|
||||||
$(eval NODE_VERSION := $(shell printf "%03d%03d%03d" $(shell node -v | grep -Eo '[0-9]+\.?[0-9]+?\.?[0-9]?' | tr '.' ' ');))
|
$(eval NODE_VERSION := $(shell printf "%03d%03d%03d" $(shell node -v | grep -Eo '[0-9]+\.?[0-9]+?\.?[0-9]?' | tr '.' ' ');))
|
||||||
|
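The go-check and node-check targets pack a dotted version like 1.13.4 into a zero-padded string ("001013004") so that a plain numeric comparison against a minimum such as "001011000" works. A small Go illustration of that packing idea (not part of the Makefile, just a sketch of the comparison it performs):

```go
package main

import (
	"fmt"
	"strconv"
	"strings"
)

// pack turns "1.13.4" into "001013004", mirroring the Makefile's
// printf "%03d%03d%03d" over the dot-separated version parts.
func pack(version string) string {
	parts := strings.Split(version, ".")
	for len(parts) < 3 {
		parts = append(parts, "0") // missing minor/patch counts as 0
	}
	out := ""
	for _, p := range parts[:3] {
		n, _ := strconv.Atoi(p)
		out += fmt.Sprintf("%03d", n)
	}
	return out
}

func main() {
	have, min := pack("1.13.4"), pack("1.11.0")
	// Comparison works because every field is zero-padded to 3 digits.
	fmt.Println(have, min, have >= min) // 001013004 001011000 true
}
```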
@@ -149,10 +166,6 @@ fmt:
 vet:
 	$(GO) vet $(PACKAGES)
 
-.PHONY: generate
-generate: js css
-	GO111MODULE=on $(GO) generate -mod=vendor $(PACKAGES)
-
 .PHONY: generate-swagger
 generate-swagger:
 	$(SWAGGER) generate spec -o './$(SWAGGER_SPEC)'
@@ -233,7 +246,7 @@ coverage:
 
 .PHONY: unit-test-coverage
 unit-test-coverage:
-	$(GO) test -tags='sqlite sqlite_unlock_notify' -cover -coverprofile coverage.out $(PACKAGES) && echo "\n==>\033[32m Ok\033[m\n" || exit 1
+	GO111MODULE=on $(GO) test -mod=vendor -tags='sqlite sqlite_unlock_notify' -cover -coverprofile coverage.out $(PACKAGES) && echo "\n==>\033[32m Ok\033[m\n" || exit 1
 
 .PHONY: vendor
 vendor:
@@ -376,7 +389,7 @@ integrations.mssql.test: $(GO_SOURCES)
 integrations.sqlite.test: $(GO_SOURCES)
 	GO111MODULE=on $(GO) test -mod=vendor -c code.gitea.io/gitea/integrations -o integrations.sqlite.test -tags 'sqlite sqlite_unlock_notify'
 
-integrations.cover.test: $(GO_SOURCES)
+integrations.cover.test: git-check $(GO_SOURCES)
 	GO111MODULE=on $(GO) test -mod=vendor -c code.gitea.io/gitea/integrations -coverpkg $(shell echo $(PACKAGES) | tr ' ' ',') -o integrations.cover.test
 
 .PHONY: migrations.mysql.test
@@ -407,13 +420,23 @@ install: $(wildcard *.go)
 	$(GO) install -v -tags '$(TAGS)' -ldflags '-s -w $(LDFLAGS)'
 
 .PHONY: build
-build: go-check generate $(EXECUTABLE)
+build: frontend backend
+
+.PHONY: frontend
+frontend: node-check js css
+
+.PHONY: backend
+backend: go-check generate $(EXECUTABLE)
+
+.PHONY: generate
+generate:
+	GO111MODULE=on $(GO) generate -mod=vendor $(PACKAGES)
 
 $(EXECUTABLE): $(GO_SOURCES)
 	GO111MODULE=on $(GO) build -mod=vendor $(GOFLAGS) $(EXTRA_GOFLAGS) -tags '$(TAGS)' -ldflags '-s -w $(LDFLAGS)' -o $@
 
 .PHONY: release
-release: generate release-dirs release-windows release-linux release-darwin release-copy release-compress release-check
+release: frontend generate release-dirs release-windows release-linux release-darwin release-copy release-compress release-sources release-check
 
 .PHONY: release-dirs
 release-dirs:
@@ -464,6 +487,12 @@ release-compress:
 	fi
 	cd $(DIST)/release/; for file in `find . -type f -name "*"`; do echo "compressing $${file}" && gxz -k -9 $${file}; done;
 
+.PHONY: release-sources
+release-sources: | node_modules
+	echo $(VERSION) > $(STORED_VERSION_FILE)
+	tar --exclude=./$(DIST) --exclude=./.git --exclude=./node_modules/.cache -czf $(DIST)/release/gitea-src-$(VERSION).tar.gz .
+	rm -f $(STORED_VERSION_FILE)
+
 node_modules: package-lock.json
 	npm install --no-save
 
@@ -33,6 +33,15 @@ From the root of the source tree, run:
 
     TAGS="bindata" make build
 
+The `build` target is split into two sub-targets:
+
+- `make backend` which requires [Go 1.11](https://golang.org/dl/) or greater.
+- `make frontend` which requires [Node.js 10.0.0](https://nodejs.org/en/download/) or greater.
+
+If pre-built frontend files are present it is possible to only build the backend:
+
+    TAGS="bindata" make backend
+
 More info: https://docs.gitea.io/en-us/install-from-source/
 
 ## Using
@@ -61,6 +61,10 @@ var (
			Name:  "admin-filter",
			Usage: "An LDAP filter specifying if a user should be given administrator privileges.",
		},
+		cli.BoolFlag{
+			Name:  "allow-deactivate-all",
+			Usage: "Allow empty search results to deactivate all users.",
+		},
 		cli.StringFlag{
 			Name:  "username-attribute",
 			Usage: "The attribute of the user’s LDAP record containing the user name.",
@@ -231,6 +235,9 @@ func parseLdapConfig(c *cli.Context, config *models.LDAPConfig) error {
 	if c.IsSet("admin-filter") {
 		config.Source.AdminFilter = c.String("admin-filter")
 	}
+	if c.IsSet("allow-deactivate-all") {
+		config.Source.AllowDeactivateAll = c.Bool("allow-deactivate-all")
+	}
 	return nil
 }
 
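The new allow-deactivate-all switch exists so that an LDAP sync which suddenly returns zero users does not deactivate every account unless the administrator explicitly opts in. A hedged sketch of the kind of guard this implies in the sync routine (simplified, illustrative types; not the actual models package code):

```go
package main

import "fmt"

// ldapSource is a simplified stand-in for the LDAP source configuration.
type ldapSource struct {
	AllowDeactivateAll bool
}

// shouldDeactivateMissing reports whether users missing from the search
// results may be deactivated during this sync run.
func shouldDeactivateMissing(src ldapSource, searchResults int) bool {
	if searchResults == 0 && !src.AllowDeactivateAll {
		// An empty result set is treated as a failed search rather than
		// "everyone left", unless allow-deactivate-all was set.
		return false
	}
	return true
}

func main() {
	fmt.Println(shouldDeactivateMissing(ldapSource{}, 0))                         // false
	fmt.Println(shouldDeactivateMissing(ldapSource{AllowDeactivateAll: true}, 0)) // true
	fmt.Println(shouldDeactivateMissing(ldapSource{}, 42))                        // true
}
```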
@@ -151,8 +151,10 @@ func runDump(ctx *cli.Context) error {
 		}
 	}
 
-	if err := z.AddDir("log", setting.LogRootPath); err != nil {
-		fatal("Failed to include log: %v", err)
+	if com.IsExist(setting.LogRootPath) {
+		if err := z.AddDir("log", setting.LogRootPath); err != nil {
+			fatal("Failed to include log: %v", err)
+		}
 	}
 
 	if err = z.Close(); err != nil {
cmd/hook.go (148 lines changed)

@@ -8,10 +8,12 @@ import (
 	"bufio"
 	"bytes"
 	"fmt"
+	"io"
 	"net/http"
 	"os"
 	"strconv"
 	"strings"
+	"time"
 
 	"code.gitea.io/gitea/models"
 	"code.gitea.io/gitea/modules/git"
@@ -58,6 +60,85 @@ var (
 	}
 )
 
+type delayWriter struct {
+	internal io.Writer
+	buf      *bytes.Buffer
+	timer    *time.Timer
+}
+
+func newDelayWriter(internal io.Writer, delay time.Duration) *delayWriter {
+	timer := time.NewTimer(delay)
+	return &delayWriter{
+		internal: internal,
+		buf:      &bytes.Buffer{},
+		timer:    timer,
+	}
+}
+
+func (d *delayWriter) Write(p []byte) (n int, err error) {
+	if d.buf != nil {
+		select {
+		case <-d.timer.C:
+			_, err := d.internal.Write(d.buf.Bytes())
+			if err != nil {
+				return 0, err
+			}
+			d.buf = nil
+			return d.internal.Write(p)
+		default:
+			return d.buf.Write(p)
+		}
+	}
+	return d.internal.Write(p)
+}
+
+func (d *delayWriter) WriteString(s string) (n int, err error) {
+	if d.buf != nil {
+		select {
+		case <-d.timer.C:
+			_, err := d.internal.Write(d.buf.Bytes())
+			if err != nil {
+				return 0, err
+			}
+			d.buf = nil
+			return d.internal.Write([]byte(s))
+		default:
+			return d.buf.WriteString(s)
+		}
+	}
+	return d.internal.Write([]byte(s))
+}
+
+func (d *delayWriter) Close() error {
+	if d == nil {
+		return nil
+	}
+	stopped := d.timer.Stop()
+	if stopped {
+		return nil
+	}
+	select {
+	case <-d.timer.C:
+	default:
+	}
+	if d.buf == nil {
+		return nil
+	}
+	_, err := d.internal.Write(d.buf.Bytes())
+	d.buf = nil
+	return err
+}
+
+type nilWriter struct{}
+
+func (n *nilWriter) Write(p []byte) (int, error) {
+	return len(p), nil
+}
+
+func (n *nilWriter) WriteString(s string) (int, error) {
+	return len(s), nil
+}
+
 func runHookPreReceive(c *cli.Context) error {
 	if os.Getenv(models.EnvIsInternal) == "true" {
 		return nil
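The delayWriter above buffers hook progress output and only starts writing through to the real stream once the configured delay has elapsed, so fast pushes stay quiet while slow ones still show progress. A minimal sketch of how it is meant to be wired up, assuming the delayWriter and nilWriter types from the diff above are in scope (same package); verbose and delay stand in for the Git settings used in the hook code:

```go
// Sketch only: mirrors the hook code's pattern of newDelayWriter plus a
// deferred Close; not a separate API in the real package.
func progressWriter(verbose bool, delay time.Duration) (io.Writer, func()) {
	var out io.Writer = &nilWriter{} // default: swallow progress output
	cleanup := func() {}
	if verbose {
		if delay > 0 {
			dWriter := newDelayWriter(os.Stdout, delay)
			cleanup = func() { _ = dWriter.Close() } // flush or drop the buffer
			out = dWriter
		} else {
			out = os.Stdout
		}
	}
	return out, cleanup
}

// Example use inside a hook:
//
//	out, done := progressWriter(setting.Git.VerbosePush, setting.Git.VerbosePushDelay)
//	defer done()
//	fmt.Fprintf(out, "Checked %d references in total\n", total)
```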
@@ -101,6 +182,18 @@ Gitea or set your environment appropriately.`, "")
 	total := 0
 	lastline := 0
 
+	var out io.Writer
+	out = &nilWriter{}
+	if setting.Git.VerbosePush {
+		if setting.Git.VerbosePushDelay > 0 {
+			dWriter := newDelayWriter(os.Stdout, setting.Git.VerbosePushDelay)
+			defer dWriter.Close()
+			out = dWriter
+		} else {
+			out = os.Stdout
+		}
+	}
+
 	for scanner.Scan() {
 		// TODO: support news feeds for wiki
 		if isWiki {
@@ -124,12 +217,10 @@ Gitea or set your environment appropriately.`, "")
 			newCommitIDs[count] = newCommitID
 			refFullNames[count] = refFullName
 			count++
-			fmt.Fprintf(os.Stdout, "*")
-			os.Stdout.Sync()
+			fmt.Fprintf(out, "*")
 
 			if count >= hookBatchSize {
-				fmt.Fprintf(os.Stdout, " Checking %d branches\n", count)
-				os.Stdout.Sync()
+				fmt.Fprintf(out, " Checking %d branches\n", count)
 
 				hookOptions.OldCommitIDs = oldCommitIDs
 				hookOptions.NewCommitIDs = newCommitIDs
@@ -147,12 +238,10 @@ Gitea or set your environment appropriately.`, "")
 				lastline = 0
 			}
 		} else {
-			fmt.Fprintf(os.Stdout, ".")
-			os.Stdout.Sync()
+			fmt.Fprintf(out, ".")
 		}
 		if lastline >= hookBatchSize {
-			fmt.Fprintf(os.Stdout, "\n")
-			os.Stdout.Sync()
+			fmt.Fprintf(out, "\n")
 			lastline = 0
 		}
 	}
@@ -162,8 +251,7 @@ Gitea or set your environment appropriately.`, "")
 		hookOptions.NewCommitIDs = newCommitIDs[:count]
 		hookOptions.RefFullNames = refFullNames[:count]
 
-		fmt.Fprintf(os.Stdout, " Checking %d branches\n", count)
-		os.Stdout.Sync()
+		fmt.Fprintf(out, " Checking %d branches\n", count)
 
 		statusCode, msg := private.HookPreReceive(username, reponame, hookOptions)
 		switch statusCode {
@@ -173,14 +261,11 @@ Gitea or set your environment appropriately.`, "")
 			fail(msg, "")
 		}
 	} else if lastline > 0 {
-		fmt.Fprintf(os.Stdout, "\n")
-		os.Stdout.Sync()
+		fmt.Fprintf(out, "\n")
 		lastline = 0
 	}
 
-	fmt.Fprintf(os.Stdout, "Checked %d references in total\n", total)
-	os.Stdout.Sync()
-
+	fmt.Fprintf(out, "Checked %d references in total\n", total)
 	return nil
 }
 
@@ -206,6 +291,19 @@ Gitea or set your environment appropriately.`, "")
 		}
 	}
 
+	var out io.Writer
+	var dWriter *delayWriter
+	out = &nilWriter{}
+	if setting.Git.VerbosePush {
+		if setting.Git.VerbosePushDelay > 0 {
+			dWriter = newDelayWriter(os.Stdout, setting.Git.VerbosePushDelay)
+			defer dWriter.Close()
+			out = dWriter
+		} else {
+			out = os.Stdout
+		}
+	}
+
 	// the environment setted on serv command
 	repoUser := os.Getenv(models.EnvRepoUsername)
 	isWiki := (os.Getenv(models.EnvRepoIsWiki) == "true")
@@ -241,7 +339,7 @@ Gitea or set your environment appropriately.`, "")
 			continue
 		}
 
-		fmt.Fprintf(os.Stdout, ".")
+		fmt.Fprintf(out, ".")
 		oldCommitIDs[count] = string(fields[0])
 		newCommitIDs[count] = string(fields[1])
 		refFullNames[count] = string(fields[2])
@@ -250,16 +348,15 @@ Gitea or set your environment appropriately.`, "")
 		}
 		count++
 		total++
-		os.Stdout.Sync()
 
 		if count >= hookBatchSize {
-			fmt.Fprintf(os.Stdout, " Processing %d references\n", count)
-			os.Stdout.Sync()
+			fmt.Fprintf(out, " Processing %d references\n", count)
 			hookOptions.OldCommitIDs = oldCommitIDs
 			hookOptions.NewCommitIDs = newCommitIDs
 			hookOptions.RefFullNames = refFullNames
 			resp, err := private.HookPostReceive(repoUser, repoName, hookOptions)
 			if resp == nil {
+				_ = dWriter.Close()
 				hookPrintResults(results)
 				fail("Internal Server Error", err)
 			}
@@ -277,9 +374,9 @@ Gitea or set your environment appropriately.`, "")
 			fail("Internal Server Error", "SetDefaultBranch failed with Error: %v", err)
 		}
 	}
-	fmt.Fprintf(os.Stdout, "Processed %d references in total\n", total)
-	os.Stdout.Sync()
+	fmt.Fprintf(out, "Processed %d references in total\n", total)
 
+	_ = dWriter.Close()
 	hookPrintResults(results)
 	return nil
 }
@@ -288,19 +385,18 @@ Gitea or set your environment appropriately.`, "")
 		hookOptions.NewCommitIDs = newCommitIDs[:count]
|
hookOptions.NewCommitIDs = newCommitIDs[:count]
|
||||||
hookOptions.RefFullNames = refFullNames[:count]
|
hookOptions.RefFullNames = refFullNames[:count]
|
||||||
|
|
||||||
fmt.Fprintf(os.Stdout, " Processing %d references\n", count)
|
fmt.Fprintf(out, " Processing %d references\n", count)
|
||||||
os.Stdout.Sync()
|
|
||||||
|
|
||||||
resp, err := private.HookPostReceive(repoUser, repoName, hookOptions)
|
resp, err := private.HookPostReceive(repoUser, repoName, hookOptions)
|
||||||
if resp == nil {
|
if resp == nil {
|
||||||
|
_ = dWriter.Close()
|
||||||
hookPrintResults(results)
|
hookPrintResults(results)
|
||||||
fail("Internal Server Error", err)
|
fail("Internal Server Error", err)
|
||||||
}
|
}
|
||||||
wasEmpty = wasEmpty || resp.RepoWasEmpty
|
wasEmpty = wasEmpty || resp.RepoWasEmpty
|
||||||
results = append(results, resp.Results...)
|
results = append(results, resp.Results...)
|
||||||
|
|
||||||
fmt.Fprintf(os.Stdout, "Processed %d references in total\n", total)
|
fmt.Fprintf(out, "Processed %d references in total\n", total)
|
||||||
os.Stdout.Sync()
|
|
||||||
|
|
||||||
if wasEmpty && masterPushed {
|
if wasEmpty && masterPushed {
|
||||||
// We need to tell the repo to reset the default branch to master
|
// We need to tell the repo to reset the default branch to master
|
||||||
|
@ -309,7 +405,7 @@ Gitea or set your environment appropriately.`, "")
|
||||||
fail("Internal Server Error", "SetDefaultBranch failed with Error: %v", err)
|
fail("Internal Server Error", "SetDefaultBranch failed with Error: %v", err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
_ = dWriter.Close()
|
||||||
hookPrintResults(results)
|
hookPrintResults(results)
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
|
|
|
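The hook changes above keep reading `old new ref` triples from standard input and hand them to the internal API in batches of `hookBatchSize`, flushing the partial batch at the end. Below is a minimal, standalone sketch of that batching pattern; `batchSize` and `flush` are illustrative stand-ins, not Gitea's actual API.

```go
package main

import (
	"bufio"
	"fmt"
	"os"
	"strings"
)

const batchSize = 30 // stand-in for hookBatchSize

// flush stands in for the call that would hand one batch of
// old-commit/new-commit/ref triples to the internal API.
func flush(oldIDs, newIDs, refs []string) {
	fmt.Printf("checking %d references\n", len(refs))
}

func main() {
	oldIDs := make([]string, 0, batchSize)
	newIDs := make([]string, 0, batchSize)
	refs := make([]string, 0, batchSize)

	scanner := bufio.NewScanner(os.Stdin)
	for scanner.Scan() {
		// Each pre-receive stdin line is "<old-sha> <new-sha> <ref-name>".
		fields := strings.Fields(scanner.Text())
		if len(fields) != 3 {
			continue
		}
		oldIDs = append(oldIDs, fields[0])
		newIDs = append(newIDs, fields[1])
		refs = append(refs, fields[2])

		if len(refs) >= batchSize {
			flush(oldIDs, newIDs, refs)
			oldIDs, newIDs, refs = oldIDs[:0], newIDs[:0], refs[:0]
		}
	}
	if len(refs) > 0 {
		flush(oldIDs, newIDs, refs) // remainder
	}
}
```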
@@ -12,6 +12,7 @@ import (
    "net/url"
    "os"
    "os/exec"
+    "regexp"
    "strconv"
    "strings"
    "time"
@@ -72,6 +73,7 @@ var (
        "git-receive-pack": models.AccessModeWrite,
        lfsAuthenticateVerb: models.AccessModeNone,
    }
+    alphaDashDotPattern = regexp.MustCompile(`[^\w-\.]`)
)

func fail(userMessage, logMessage string, args ...interface{}) {
@@ -147,6 +149,10 @@ func runServ(c *cli.Context) error {
    username := strings.ToLower(rr[0])
    reponame := strings.ToLower(strings.TrimSuffix(rr[1], ".git"))

+    if alphaDashDotPattern.MatchString(reponame) {
+        fail("Invalid repo name", "Invalid repo name: %s", reponame)
+    }
+
    if setting.EnablePprof || c.Bool("enable-pprof") {
        if err := os.MkdirAll(setting.PprofDataPath, os.ModePerm); err != nil {
            fail("Error while trying to create PPROF_DATA_PATH", "Error while trying to create PPROF_DATA_PATH: %v", err)
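The new `alphaDashDotPattern` check rejects repository names containing anything other than word characters, dashes, and dots before any git command runs. A small standalone sketch using the same pattern; the sample names are made up for illustration.

```go
package main

import (
	"fmt"
	"regexp"
)

// Same pattern as the hunk above: any character that is not a word
// character, dash, or dot marks the name as invalid.
var alphaDashDotPattern = regexp.MustCompile(`[^\w-\.]`)

func main() {
	for _, name := range []string{"gitea", "my-repo.wiki", "bad name", "x/y"} {
		if alphaDashDotPattern.MatchString(name) {
			fmt.Printf("%q would be rejected\n", name)
			continue
		}
		fmt.Printf("%q is accepted\n", name)
	}
}
```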
@@ -275,8 +275,9 @@ DISABLE_ROUTER_LOG = false
; not forget to export the private key):
; $ openssl pkcs12 -in cert.pfx -out cert.pem -nokeys
; $ openssl pkcs12 -in cert.pfx -out key.pem -nocerts -nodes
-CERT_FILE = custom/https/cert.pem
-KEY_FILE = custom/https/key.pem
+; Paths are relative to CUSTOM_PATH
+CERT_FILE = https/cert.pem
+KEY_FILE = https/key.pem
; Root directory containing templates and static files.
; default is the path where Gitea is executed
STATIC_ROOT_PATH =
@@ -18,7 +18,7 @@ params:
  description: Git with a cup of tea
  author: The Gitea Authors
  website: https://docs.gitea.io
-  version: 1.10.2
+  version: 1.11.0

outputs:
  home:
@@ -181,8 +181,8 @@ Values containing `#` or `;` must be quoted using `` ` `` or `"""`.
- `SSH_LISTEN_PORT`: **%(SSH\_PORT)s**: Port for the built-in SSH server.
- `OFFLINE_MODE`: **false**: Disables use of CDN for static files and Gravatar for profile pictures.
- `DISABLE_ROUTER_LOG`: **false**: Mute printing of the router log.
-- `CERT_FILE`: **custom/https/cert.pem**: Cert file path used for HTTPS.
-- `KEY_FILE`: **custom/https/key.pem**: Key file path used for HTTPS.
+- `CERT_FILE`: **https/cert.pem**: Cert file path used for HTTPS. From 1.11 paths are relative to `CUSTOM_PATH`.
+- `KEY_FILE`: **https/key.pem**: Key file path used for HTTPS. From 1.11 paths are relative to `CUSTOM_PATH`.
- `STATIC_ROOT_PATH`: **./**: Upper level of template and static files path.
- `STATIC_CACHE_TIME`: **6h**: Web browser cache time for static resources on `custom/`, `public/` and all uploaded avatars.
- `ENABLE_GZIP`: **false**: Enables application-level GZIP support.
@@ -522,6 +522,8 @@ NB: You must `REDIRECT_MACARON_LOG` and have `DISABLE_ROUTER_LOG` set to `false`
- `MAX_GIT_DIFF_FILES`: **100**: Max number of files shown in diff view.
- `GC_ARGS`: **\<empty\>**: Arguments for command `git gc`, e.g. `--aggressive --auto`. See more on http://git-scm.com/docs/git-gc/
- `ENABLE_AUTO_GIT_WIRE_PROTOCOL`: **true**: If use git wire protocol version 2 when git version >= 2.18, default is true, set to false when you always want git wire protocol version 1
+- `VERBOSE_PUSH`: **true**: Print status information about pushes as they are being processed.
+- `VERBOSE_PUSH_DELAY`: **5s**: Only print verbose information if push takes longer than this delay.

## Git - Timeout settings (`git.timeout`)
- `DEFAUlT`: **360**: Git operations default timeout seconds.
@@ -60,7 +60,7 @@ _Symbols used in table:_
| Git LFS 2.0 | ✓ | ✘ | ✓ | ✓ | ✓ | ⁄ | ✓ |
| Group Milestones | ✘ | ✘ | ✘ | ✓ | ✓ | ✘ | ✘ |
| Granular user roles (Code, Issues, Wiki etc) | ✓ | ✘ | ✘ | ✓ | ✓ | ✘ | ✘ |
-| Verified Committer | ✘ | ✘ | ? | ✓ | ✓ | ✓ | ✘ |
+| Verified Committer | ⁄ | ✘ | ? | ✓ | ✓ | ✓ | ✘ |
| GPG Signed Commits | ✓ | ✘ | ✓ | ✓ | ✓ | ✓ | ✓ |
| Reject unsigned commits | [✘](https://github.com/go-gitea/gitea/issues/2770) | ✘ | ✓ | ✓ | ✓ | ✘ | ✓ |
| Repository Activity page | ✓ | ✘ | ✓ | ✓ | ✓ | ✓ | ✓ |
@@ -114,6 +114,17 @@ recommended way to build from source is therefore:
TAGS="bindata sqlite sqlite_unlock_notify" make build
```

+The `build` target is split into two sub-targets:
+
+- `make backend` which requires [Go 1.11](https://golang.org/dl/) or greater.
+- `make frontend` which requires [Node.js 10.0.0](https://nodejs.org/en/download/) or greater.
+
+If pre-built frontend files are present it is possible to only build the backend:
+
+```bash
+TAGS="bindata" make backend
+```
+
## Test

After following the steps above, a `gitea` binary will be available in the working directory.
@@ -136,7 +136,8 @@ the `!` marker to identify pull requests. For example:
> This is pull request [!1234](#), and links to a pull request in Gitea.

The `!` and `#` can be used interchangeably for issues and pull request _except_
-for this case, where a distinction is required.
+for this case, where a distinction is required. If the repository uses external
+tracker, commit message for squash merge will use `!` as reference by default.

## Issues and Pull Requests References Summary
go.mod (6 changed lines)
@@ -62,7 +62,7 @@ require (
    github.com/lib/pq v1.2.0
    github.com/lunny/dingtalk_webhook v0.0.0-20171025031554-e3534c89ef96
    github.com/mailru/easyjson v0.7.0 // indirect
-    github.com/markbates/goth v1.56.0
+    github.com/markbates/goth v1.61.2
    github.com/mattn/go-isatty v0.0.7
    github.com/mattn/go-oci8 v0.0.0-20190320171441-14ba190cf52d // indirect
    github.com/mattn/go-sqlite3 v1.11.0
@@ -95,10 +95,10 @@ require (
    github.com/yohcop/openid-go v0.0.0-20160914080427-2c050d2dae53
    github.com/yuin/goldmark v1.1.19
    go.etcd.io/bbolt v1.3.3 // indirect
-    golang.org/x/crypto v0.0.0-20191227163750-53104e6ec876
+    golang.org/x/crypto v0.0.0-20200219234226-1ad67e1f0ef4
    golang.org/x/net v0.0.0-20191101175033-0deb6923b6d9
    golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45
-    golang.org/x/sys v0.0.0-20191127021746-63cb32ae39b2
+    golang.org/x/sys v0.0.0-20200219091948-cb0a6d8edb6c
    golang.org/x/text v0.3.2
    golang.org/x/tools v0.0.0-20191213221258-04c2e8eff935 // indirect
    gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc // indirect
go.sum (13 changed lines)
@@ -351,6 +351,7 @@ github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/lafriks/xormstore v1.3.2 h1:hqi3F8s/B4rz8GuEZZDuHuOxRjeuOpEI/cC7vcnWwH4=
github.com/lafriks/xormstore v1.3.2/go.mod h1:mVNIwIa25QIr8rfR7YlVjrqN/apswHkVdtLCyVYBzXw=
+github.com/lestrrat-go/jwx v0.9.0/go.mod h1:iEoxlYfZjvoGpuWwxUz+eR5e6KTJGsaRcy/YNA/UnBk=
github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.2.0 h1:LXpIM/LZ5xGFhOpXAQUIMM1HdyqzVYM13zNdjCEEcA0=
github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
@@ -370,8 +371,8 @@ github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN
github.com/mailru/easyjson v0.7.0 h1:aizVhC/NAAcKWb+5QsU1iNOZb4Yws5UO2I+aIprQITM=
github.com/mailru/easyjson v0.7.0/go.mod h1:KAzv3t3aY1NaHWoQz1+4F1ccyAH66Jk7yos7ldAVICs=
github.com/markbates/going v1.0.0/go.mod h1:I6mnB4BPnEeqo85ynXIx1ZFLLbtiLHNXVgWeFO9OGOA=
-github.com/markbates/goth v1.56.0 h1:XEYedCgMNz5pi3ojXI8z2XUmXtBnMeuKUpx4Z6HlNj8=
-github.com/markbates/goth v1.56.0/go.mod h1:zZmAw0Es0Dpm7TT/4AdN14QrkiWLMrrU9Xei1o+/mdA=
+github.com/markbates/goth v1.61.2 h1:jDowrUH5qw8KGuQdKwFhLzkXkTYCIPfz3LHADJsiPIs=
+github.com/markbates/goth v1.61.2/go.mod h1:qh2QfwZoWRucQ+DR5KVKC6dUGkNCToWh4vS45GIzFsY=
github.com/mattn/go-isatty v0.0.7 h1:UvyT9uN+3r7yLEYSlJsbQGdsaB/a0DlgWP3pql6iwOc=
github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
github.com/mattn/go-oci8 v0.0.0-20190320171441-14ba190cf52d h1:m+dSK37rFf2fqppZhg15yI2IwC9BtucBiRwSDm9VL8g=
@@ -581,8 +582,8 @@ golang.org/x/crypto v0.0.0-20190907121410-71b5226ff739/go.mod h1:yigFU9vqHzYiE8U
golang.org/x/crypto v0.0.0-20190927123631-a832865fa7ad h1:5E5raQxcv+6CZ11RrBYQe5WRbUIWpScjh0kvHZkZIrQ=
golang.org/x/crypto v0.0.0-20190927123631-a832865fa7ad/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
-golang.org/x/crypto v0.0.0-20191227163750-53104e6ec876 h1:sKJQZMuxjOAR/Uo2LBfU90onWEf1dF4C+0hPJCc9Mpc=
-golang.org/x/crypto v0.0.0-20191227163750-53104e6ec876/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
+golang.org/x/crypto v0.0.0-20200219234226-1ad67e1f0ef4 h1:4icQlpeqbz3WxfgP6Eq3szTj95KTrlH/CwzBzoxuFd0=
+golang.org/x/crypto v0.0.0-20200219234226-1ad67e1f0ef4/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
@@ -652,8 +653,8 @@ golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20190907184412-d223b2b6db03/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191010194322-b09406accb47 h1:/XfQ9z7ib8eEJX2hdgFTZJ/ntt0swNk5oYBziWeTCvY=
golang.org/x/sys v0.0.0-20191010194322-b09406accb47/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20191127021746-63cb32ae39b2 h1:/J2nHFg1MTqaRLFO7M+J78ASNsJoz3r0cvHBPQ77fsE=
-golang.org/x/sys v0.0.0-20191127021746-63cb32ae39b2/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200219091948-cb0a6d8edb6c h1:jceGD5YNJGgGMkJz79agzOln1K9TaZUjv5ird16qniQ=
+golang.org/x/sys v0.0.0-20200219091948-cb0a6d8edb6c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs=
@@ -0,0 +1,47 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// Use of this source code is governed by a MIT-style
+// license that can be found in the LICENSE file.
+
+package integrations
+
+import (
+    "fmt"
+    "net/http"
+    "testing"
+
+    "code.gitea.io/gitea/models"
+    "code.gitea.io/gitea/modules/structs"
+
+    "github.com/stretchr/testify/assert"
+)
+
+func TestAPIIssuesMilestone(t *testing.T) {
+    defer prepareTestEnv(t)()
+
+    milestone := models.AssertExistsAndLoadBean(t, &models.Milestone{ID: 1}).(*models.Milestone)
+    repo := models.AssertExistsAndLoadBean(t, &models.Repository{ID: milestone.RepoID}).(*models.Repository)
+    owner := models.AssertExistsAndLoadBean(t, &models.User{ID: repo.OwnerID}).(*models.User)
+    assert.Equal(t, int64(1), int64(milestone.NumIssues))
+    assert.Equal(t, structs.StateOpen, milestone.State())
+
+    session := loginUser(t, owner.Name)
+    token := getTokenForLoggedInUser(t, session)
+
+    // update values of issue
+    milestoneState := "closed"
+
+    urlStr := fmt.Sprintf("/api/v1/repos/%s/%s/milestones/%d?token=%s", owner.Name, repo.Name, milestone.ID, token)
+    req := NewRequestWithJSON(t, "PATCH", urlStr, structs.EditMilestoneOption{
+        State: &milestoneState,
+    })
+    resp := session.MakeRequest(t, req, http.StatusOK)
+    var apiMilestone structs.Milestone
+    DecodeJSON(t, resp, &apiMilestone)
+    assert.EqualValues(t, "closed", apiMilestone.State)
+
+    req = NewRequest(t, "GET", urlStr)
+    resp = session.MakeRequest(t, req, http.StatusOK)
+    var apiMilestone2 structs.Milestone
+    DecodeJSON(t, resp, &apiMilestone2)
+    assert.EqualValues(t, "closed", apiMilestone2.State)
+}
@ -7,6 +7,7 @@ package integrations
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"net/http"
|
"net/http"
|
||||||
|
"net/url"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"code.gitea.io/gitea/models"
|
"code.gitea.io/gitea/models"
|
||||||
|
@ -38,12 +39,12 @@ func TestAPICreateIssue(t *testing.T) {
|
||||||
defer prepareTestEnv(t)()
|
defer prepareTestEnv(t)()
|
||||||
const body, title = "apiTestBody", "apiTestTitle"
|
const body, title = "apiTestBody", "apiTestTitle"
|
||||||
|
|
||||||
repo := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 1}).(*models.Repository)
|
repoBefore := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 1}).(*models.Repository)
|
||||||
owner := models.AssertExistsAndLoadBean(t, &models.User{ID: repo.OwnerID}).(*models.User)
|
owner := models.AssertExistsAndLoadBean(t, &models.User{ID: repoBefore.OwnerID}).(*models.User)
|
||||||
|
|
||||||
session := loginUser(t, owner.Name)
|
session := loginUser(t, owner.Name)
|
||||||
token := getTokenForLoggedInUser(t, session)
|
token := getTokenForLoggedInUser(t, session)
|
||||||
urlStr := fmt.Sprintf("/api/v1/repos/%s/%s/issues?state=all&token=%s", owner.Name, repo.Name, token)
|
urlStr := fmt.Sprintf("/api/v1/repos/%s/%s/issues?state=all&token=%s", owner.Name, repoBefore.Name, token)
|
||||||
req := NewRequestWithJSON(t, "POST", urlStr, &api.CreateIssueOption{
|
req := NewRequestWithJSON(t, "POST", urlStr, &api.CreateIssueOption{
|
||||||
Body: body,
|
Body: body,
|
||||||
Title: title,
|
Title: title,
|
||||||
|
@ -56,19 +57,23 @@ func TestAPICreateIssue(t *testing.T) {
|
||||||
assert.Equal(t, apiIssue.Title, title)
|
assert.Equal(t, apiIssue.Title, title)
|
||||||
|
|
||||||
models.AssertExistsAndLoadBean(t, &models.Issue{
|
models.AssertExistsAndLoadBean(t, &models.Issue{
|
||||||
RepoID: repo.ID,
|
RepoID: repoBefore.ID,
|
||||||
AssigneeID: owner.ID,
|
AssigneeID: owner.ID,
|
||||||
Content: body,
|
Content: body,
|
||||||
Title: title,
|
Title: title,
|
||||||
})
|
})
|
||||||
|
|
||||||
|
repoAfter := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 1}).(*models.Repository)
|
||||||
|
assert.Equal(t, repoBefore.NumIssues+1, repoAfter.NumIssues)
|
||||||
|
assert.Equal(t, repoBefore.NumClosedIssues, repoAfter.NumClosedIssues)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestAPIEditIssue(t *testing.T) {
|
func TestAPIEditIssue(t *testing.T) {
|
||||||
defer prepareTestEnv(t)()
|
defer prepareTestEnv(t)()
|
||||||
|
|
||||||
issueBefore := models.AssertExistsAndLoadBean(t, &models.Issue{ID: 10}).(*models.Issue)
|
issueBefore := models.AssertExistsAndLoadBean(t, &models.Issue{ID: 10}).(*models.Issue)
|
||||||
repo := models.AssertExistsAndLoadBean(t, &models.Repository{ID: issueBefore.RepoID}).(*models.Repository)
|
repoBefore := models.AssertExistsAndLoadBean(t, &models.Repository{ID: issueBefore.RepoID}).(*models.Repository)
|
||||||
owner := models.AssertExistsAndLoadBean(t, &models.User{ID: repo.OwnerID}).(*models.User)
|
owner := models.AssertExistsAndLoadBean(t, &models.User{ID: repoBefore.OwnerID}).(*models.User)
|
||||||
assert.NoError(t, issueBefore.LoadAttributes())
|
assert.NoError(t, issueBefore.LoadAttributes())
|
||||||
assert.Equal(t, int64(1019307200), int64(issueBefore.DeadlineUnix))
|
assert.Equal(t, int64(1019307200), int64(issueBefore.DeadlineUnix))
|
||||||
assert.Equal(t, api.StateOpen, issueBefore.State())
|
assert.Equal(t, api.StateOpen, issueBefore.State())
|
||||||
|
@ -83,7 +88,7 @@ func TestAPIEditIssue(t *testing.T) {
|
||||||
body := "new content!"
|
body := "new content!"
|
||||||
title := "new title from api set"
|
title := "new title from api set"
|
||||||
|
|
||||||
urlStr := fmt.Sprintf("/api/v1/repos/%s/%s/issues/%d?token=%s", owner.Name, repo.Name, issueBefore.Index, token)
|
urlStr := fmt.Sprintf("/api/v1/repos/%s/%s/issues/%d?token=%s", owner.Name, repoBefore.Name, issueBefore.Index, token)
|
||||||
req := NewRequestWithJSON(t, "PATCH", urlStr, api.EditIssueOption{
|
req := NewRequestWithJSON(t, "PATCH", urlStr, api.EditIssueOption{
|
||||||
State: &issueState,
|
State: &issueState,
|
||||||
RemoveDeadline: &removeDeadline,
|
RemoveDeadline: &removeDeadline,
|
||||||
|
@ -98,6 +103,7 @@ func TestAPIEditIssue(t *testing.T) {
|
||||||
DecodeJSON(t, resp, &apiIssue)
|
DecodeJSON(t, resp, &apiIssue)
|
||||||
|
|
||||||
issueAfter := models.AssertExistsAndLoadBean(t, &models.Issue{ID: 10}).(*models.Issue)
|
issueAfter := models.AssertExistsAndLoadBean(t, &models.Issue{ID: 10}).(*models.Issue)
|
||||||
|
repoAfter := models.AssertExistsAndLoadBean(t, &models.Repository{ID: issueBefore.RepoID}).(*models.Repository)
|
||||||
|
|
||||||
// check deleted user
|
// check deleted user
|
||||||
assert.Equal(t, int64(500), issueAfter.PosterID)
|
assert.Equal(t, int64(500), issueAfter.PosterID)
|
||||||
|
@ -106,6 +112,9 @@ func TestAPIEditIssue(t *testing.T) {
|
||||||
assert.Equal(t, int64(-1), issueBefore.PosterID)
|
assert.Equal(t, int64(-1), issueBefore.PosterID)
|
||||||
assert.Equal(t, int64(-1), apiIssue.Poster.ID)
|
assert.Equal(t, int64(-1), apiIssue.Poster.ID)
|
||||||
|
|
||||||
|
// check repo change
|
||||||
|
assert.Equal(t, repoBefore.NumClosedIssues+1, repoAfter.NumClosedIssues)
|
||||||
|
|
||||||
// API response
|
// API response
|
||||||
assert.Equal(t, api.StateClosed, apiIssue.State)
|
assert.Equal(t, api.StateClosed, apiIssue.State)
|
||||||
assert.Equal(t, milestone, apiIssue.Milestone.ID)
|
assert.Equal(t, milestone, apiIssue.Milestone.ID)
|
||||||
|
@ -120,3 +129,47 @@ func TestAPIEditIssue(t *testing.T) {
|
||||||
assert.Equal(t, body, issueAfter.Content)
|
assert.Equal(t, body, issueAfter.Content)
|
||||||
assert.Equal(t, title, issueAfter.Title)
|
assert.Equal(t, title, issueAfter.Title)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestAPISearchIssue(t *testing.T) {
|
||||||
|
defer prepareTestEnv(t)()
|
||||||
|
|
||||||
|
session := loginUser(t, "user2")
|
||||||
|
token := getTokenForLoggedInUser(t, session)
|
||||||
|
|
||||||
|
link, _ := url.Parse("/api/v1/repos/issues/search")
|
||||||
|
req := NewRequest(t, "GET", link.String())
|
||||||
|
resp := session.MakeRequest(t, req, http.StatusOK)
|
||||||
|
var apiIssues []*api.Issue
|
||||||
|
DecodeJSON(t, resp, &apiIssues)
|
||||||
|
|
||||||
|
assert.Len(t, apiIssues, 8)
|
||||||
|
|
||||||
|
query := url.Values{}
|
||||||
|
query.Add("token", token)
|
||||||
|
link.RawQuery = query.Encode()
|
||||||
|
req = NewRequest(t, "GET", link.String())
|
||||||
|
resp = session.MakeRequest(t, req, http.StatusOK)
|
||||||
|
DecodeJSON(t, resp, &apiIssues)
|
||||||
|
assert.Len(t, apiIssues, 8)
|
||||||
|
|
||||||
|
query.Add("state", "closed")
|
||||||
|
link.RawQuery = query.Encode()
|
||||||
|
req = NewRequest(t, "GET", link.String())
|
||||||
|
resp = session.MakeRequest(t, req, http.StatusOK)
|
||||||
|
DecodeJSON(t, resp, &apiIssues)
|
||||||
|
assert.Len(t, apiIssues, 2)
|
||||||
|
|
||||||
|
query.Set("state", "all")
|
||||||
|
link.RawQuery = query.Encode()
|
||||||
|
req = NewRequest(t, "GET", link.String())
|
||||||
|
resp = session.MakeRequest(t, req, http.StatusOK)
|
||||||
|
DecodeJSON(t, resp, &apiIssues)
|
||||||
|
assert.Len(t, apiIssues, 10) //there are more but 10 is page item limit
|
||||||
|
|
||||||
|
query.Add("page", "2")
|
||||||
|
link.RawQuery = query.Encode()
|
||||||
|
req = NewRequest(t, "GET", link.String())
|
||||||
|
resp = session.MakeRequest(t, req, http.StatusOK)
|
||||||
|
DecodeJSON(t, resp, &apiIssues)
|
||||||
|
assert.Len(t, apiIssues, 0)
|
||||||
|
}
|
||||||
|
|
|
@ -351,6 +351,17 @@ func doBranchProtectPRMerge(baseCtx *APITestContext, dstPath string) func(t *tes
|
||||||
pr, err = doAPICreatePullRequest(ctx, baseCtx.Username, baseCtx.Reponame, "protected", "unprotected")(t)
|
pr, err = doAPICreatePullRequest(ctx, baseCtx.Username, baseCtx.Reponame, "protected", "unprotected")(t)
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
})
|
})
|
||||||
|
t.Run("GenerateCommit", func(t *testing.T) {
|
||||||
|
_, err := generateCommitWithNewData(littleSize, dstPath, "user2@example.com", "User Two", "branch-data-file-")
|
||||||
|
assert.NoError(t, err)
|
||||||
|
})
|
||||||
|
t.Run("PushToUnprotectedBranch", doGitPushTestRepository(dstPath, "origin", "protected:unprotected-2"))
|
||||||
|
var pr2 api.PullRequest
|
||||||
|
t.Run("CreatePullRequest", func(t *testing.T) {
|
||||||
|
pr2, err = doAPICreatePullRequest(ctx, baseCtx.Username, baseCtx.Reponame, "unprotected", "unprotected-2")(t)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
})
|
||||||
|
t.Run("MergePR2", doAPIMergePullRequest(ctx, baseCtx.Username, baseCtx.Reponame, pr2.Index))
|
||||||
t.Run("MergePR", doAPIMergePullRequest(ctx, baseCtx.Username, baseCtx.Reponame, pr.Index))
|
t.Run("MergePR", doAPIMergePullRequest(ctx, baseCtx.Username, baseCtx.Reponame, pr.Index))
|
||||||
t.Run("PullProtected", doGitPull(dstPath, "origin", "protected"))
|
t.Run("PullProtected", doGitPull(dstPath, "origin", "protected"))
|
||||||
t.Run("ProtectProtectedBranchWhitelist", doProtectBranch(ctx, "protected", baseCtx.Username))
|
t.Run("ProtectProtectedBranchWhitelist", doProtectBranch(ctx, "protected", baseCtx.Username))
|
||||||
|
@ -422,6 +433,9 @@ func doPushCreate(ctx APITestContext, u *url.URL) func(t *testing.T) {
|
||||||
tmpDir, err := ioutil.TempDir("", ctx.Reponame)
|
tmpDir, err := ioutil.TempDir("", ctx.Reponame)
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
|
|
||||||
|
_, err = git.NewCommand("clone", u.String()).RunInDir(tmpDir)
|
||||||
|
assert.Error(t, err)
|
||||||
|
|
||||||
err = git.InitRepository(tmpDir, false)
|
err = git.InitRepository(tmpDir, false)
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
|
|
||||||
|
@ -449,6 +463,13 @@ func doPushCreate(ctx APITestContext, u *url.URL) func(t *testing.T) {
|
||||||
_, err = git.NewCommand("remote", "add", "origin", u.String()).RunInDir(tmpDir)
|
_, err = git.NewCommand("remote", "add", "origin", u.String()).RunInDir(tmpDir)
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
|
|
||||||
|
invalidCtx := ctx
|
||||||
|
invalidCtx.Reponame = fmt.Sprintf("invalid/repo-tmp-push-create-%s", u.Scheme)
|
||||||
|
u.Path = invalidCtx.GitPath()
|
||||||
|
|
||||||
|
_, err = git.NewCommand("remote", "add", "invalid", u.String()).RunInDir(tmpDir)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
|
||||||
// Push to create disabled
|
// Push to create disabled
|
||||||
setting.Repository.EnablePushCreateUser = false
|
setting.Repository.EnablePushCreateUser = false
|
||||||
_, err = git.NewCommand("push", "origin", "master").RunInDir(tmpDir)
|
_, err = git.NewCommand("push", "origin", "master").RunInDir(tmpDir)
|
||||||
|
@ -456,6 +477,12 @@ func doPushCreate(ctx APITestContext, u *url.URL) func(t *testing.T) {
|
||||||
|
|
||||||
// Push to create enabled
|
// Push to create enabled
|
||||||
setting.Repository.EnablePushCreateUser = true
|
setting.Repository.EnablePushCreateUser = true
|
||||||
|
|
||||||
|
// Invalid repo
|
||||||
|
_, err = git.NewCommand("push", "invalid", "master").RunInDir(tmpDir)
|
||||||
|
assert.Error(t, err)
|
||||||
|
|
||||||
|
// Valid repo
|
||||||
_, err = git.NewCommand("push", "origin", "master").RunInDir(tmpDir)
|
_, err = git.NewCommand("push", "origin", "master").RunInDir(tmpDir)
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
|
|
||||||
|
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
@@ -1 +1 @@
-0cf15c3f66ec8384480ed9c3cf87c9e97fbb0ec3
+423313fbd38093bb10d0c8387db9105409c6f196
@@ -126,7 +126,7 @@ func restoreOldDB(t *testing.T, version string) bool {
    err := os.MkdirAll(path.Dir(setting.Database.Path), os.ModePerm)
    assert.NoError(t, err)

-    db, err := sql.Open("sqlite3", fmt.Sprintf("file:%s?cache=shared&mode=rwc&_busy_timeout=%d", setting.Database.Path, setting.Database.Timeout))
+    db, err := sql.Open("sqlite3", fmt.Sprintf("file:%s?cache=shared&mode=rwc&_busy_timeout=%d&_txlock=immediate", setting.Database.Path, setting.Database.Timeout))
    assert.NoError(t, err)
    defer db.Close()
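The only change here is the added `_txlock=immediate` DSN parameter, so SQLite transactions in the migration tests take their write lock immediately instead of upgrading it later. A minimal sketch of opening a database with the same DSN shape via github.com/mattn/go-sqlite3; the file name and busy timeout below are illustrative, not the test's values.

```go
package main

import (
	"database/sql"
	"fmt"
	"log"

	_ "github.com/mattn/go-sqlite3" // registers the "sqlite3" driver
)

func main() {
	// Shared cache, read-write-create mode, a busy timeout in milliseconds,
	// and immediate transaction locking, matching the DSN shape above.
	dsn := "file:example.db?cache=shared&mode=rwc&_busy_timeout=500&_txlock=immediate"
	db, err := sql.Open("sqlite3", dsn)
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	if err := db.Ping(); err != nil {
		log.Fatal(err)
	}
	fmt.Println("opened", dsn)
}
```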
@ -106,3 +106,57 @@ func TestPullCreate_TitleEscape(t *testing.T) {
|
||||||
assert.Equal(t, "<u>XSS PR</u>", titleHTML)
|
assert.Equal(t, "<u>XSS PR</u>", titleHTML)
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func testUIDeleteBranch(t *testing.T, session *TestSession, ownerName, repoName, branchName string) {
|
||||||
|
relURL := "/" + path.Join(ownerName, repoName, "branches")
|
||||||
|
req := NewRequest(t, "GET", relURL)
|
||||||
|
resp := session.MakeRequest(t, req, http.StatusOK)
|
||||||
|
htmlDoc := NewHTMLParser(t, resp.Body)
|
||||||
|
|
||||||
|
req = NewRequestWithValues(t, "POST", relURL+"/delete", map[string]string{
|
||||||
|
"_csrf": getCsrf(t, htmlDoc.doc),
|
||||||
|
"name": branchName,
|
||||||
|
})
|
||||||
|
session.MakeRequest(t, req, http.StatusOK)
|
||||||
|
}
|
||||||
|
|
||||||
|
func testDeleteRepository(t *testing.T, session *TestSession, ownerName, repoName string) {
|
||||||
|
relURL := "/" + path.Join(ownerName, repoName, "settings")
|
||||||
|
req := NewRequest(t, "GET", relURL)
|
||||||
|
resp := session.MakeRequest(t, req, http.StatusOK)
|
||||||
|
htmlDoc := NewHTMLParser(t, resp.Body)
|
||||||
|
|
||||||
|
req = NewRequestWithValues(t, "POST", relURL+"?action=delete", map[string]string{
|
||||||
|
"_csrf": getCsrf(t, htmlDoc.doc),
|
||||||
|
"repo_name": repoName,
|
||||||
|
})
|
||||||
|
session.MakeRequest(t, req, http.StatusFound)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestPullBranchDelete(t *testing.T) {
|
||||||
|
onGiteaRun(t, func(t *testing.T, u *url.URL) {
|
||||||
|
defer prepareTestEnv(t)()
|
||||||
|
|
||||||
|
session := loginUser(t, "user1")
|
||||||
|
testRepoFork(t, session, "user2", "repo1", "user1", "repo1")
|
||||||
|
testCreateBranch(t, session, "user1", "repo1", "branch/master", "master1", http.StatusFound)
|
||||||
|
testEditFile(t, session, "user1", "repo1", "master1", "README.md", "Hello, World (Edited)\n")
|
||||||
|
resp := testPullCreate(t, session, "user1", "repo1", "master1", "This is a pull title")
|
||||||
|
|
||||||
|
// check the redirected URL
|
||||||
|
url := resp.HeaderMap.Get("Location")
|
||||||
|
assert.Regexp(t, "^/user2/repo1/pulls/[0-9]*$", url)
|
||||||
|
req := NewRequest(t, "GET", url)
|
||||||
|
session.MakeRequest(t, req, http.StatusOK)
|
||||||
|
|
||||||
|
// delete head branch and confirm pull page is ok
|
||||||
|
testUIDeleteBranch(t, session, "user1", "repo1", "master1")
|
||||||
|
req = NewRequest(t, "GET", url)
|
||||||
|
session.MakeRequest(t, req, http.StatusOK)
|
||||||
|
|
||||||
|
// delete head repository and confirm pull page is ok
|
||||||
|
testDeleteRepository(t, session, "user1", "repo1")
|
||||||
|
req = NewRequest(t, "GET", url)
|
||||||
|
session.MakeRequest(t, req, http.StatusOK)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
|
@ -105,8 +105,6 @@ func TestPullRebase(t *testing.T) {
|
||||||
|
|
||||||
func TestPullRebaseMerge(t *testing.T) {
|
func TestPullRebaseMerge(t *testing.T) {
|
||||||
onGiteaRun(t, func(t *testing.T, giteaURL *url.URL) {
|
onGiteaRun(t, func(t *testing.T, giteaURL *url.URL) {
|
||||||
defer prepareTestEnv(t)()
|
|
||||||
|
|
||||||
hookTasks, err := models.HookTasks(1, 1) //Retrieve previous hook number
|
hookTasks, err := models.HookTasks(1, 1) //Retrieve previous hook number
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
hookTasksLenBefore := len(hookTasks)
|
hookTasksLenBefore := len(hookTasks)
|
||||||
|
@ -129,8 +127,6 @@ func TestPullRebaseMerge(t *testing.T) {
|
||||||
|
|
||||||
func TestPullSquash(t *testing.T) {
|
func TestPullSquash(t *testing.T) {
|
||||||
onGiteaRun(t, func(t *testing.T, giteaURL *url.URL) {
|
onGiteaRun(t, func(t *testing.T, giteaURL *url.URL) {
|
||||||
defer prepareTestEnv(t)()
|
|
||||||
|
|
||||||
hookTasks, err := models.HookTasks(1, 1) //Retrieve previous hook number
|
hookTasks, err := models.HookTasks(1, 1) //Retrieve previous hook number
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
hookTasksLenBefore := len(hookTasks)
|
hookTasksLenBefore := len(hookTasks)
|
||||||
|
@ -154,10 +150,9 @@ func TestPullSquash(t *testing.T) {
|
||||||
|
|
||||||
func TestPullCleanUpAfterMerge(t *testing.T) {
|
func TestPullCleanUpAfterMerge(t *testing.T) {
|
||||||
onGiteaRun(t, func(t *testing.T, giteaURL *url.URL) {
|
onGiteaRun(t, func(t *testing.T, giteaURL *url.URL) {
|
||||||
defer prepareTestEnv(t)()
|
|
||||||
session := loginUser(t, "user1")
|
session := loginUser(t, "user1")
|
||||||
testRepoFork(t, session, "user2", "repo1", "user1", "repo1")
|
testRepoFork(t, session, "user2", "repo1", "user1", "repo1")
|
||||||
testEditFileToNewBranch(t, session, "user1", "repo1", "master", "feature/test", "README.md", "Hello, World (Edited)\n")
|
testEditFileToNewBranch(t, session, "user1", "repo1", "master", "feature/test", "README.md", "Hello, World (Edited - TestPullCleanUpAfterMerge)\n")
|
||||||
|
|
||||||
resp := testPullCreate(t, session, "user1", "repo1", "feature/test", "This is a pull title")
|
resp := testPullCreate(t, session, "user1", "repo1", "feature/test", "This is a pull title")
|
||||||
|
|
||||||
|
@ -190,7 +185,6 @@ func TestPullCleanUpAfterMerge(t *testing.T) {
|
||||||
|
|
||||||
func TestCantMergeWorkInProgress(t *testing.T) {
|
func TestCantMergeWorkInProgress(t *testing.T) {
|
||||||
onGiteaRun(t, func(t *testing.T, giteaURL *url.URL) {
|
onGiteaRun(t, func(t *testing.T, giteaURL *url.URL) {
|
||||||
defer prepareTestEnv(t)()
|
|
||||||
session := loginUser(t, "user1")
|
session := loginUser(t, "user1")
|
||||||
testRepoFork(t, session, "user2", "repo1", "user1", "repo1")
|
testRepoFork(t, session, "user2", "repo1", "user1", "repo1")
|
||||||
testEditFile(t, session, "user1", "repo1", "master", "README.md", "Hello, World (Edited)\n")
|
testEditFile(t, session, "user1", "repo1", "master", "README.md", "Hello, World (Edited)\n")
|
||||||
|
@ -212,7 +206,6 @@ func TestCantMergeWorkInProgress(t *testing.T) {
|
||||||
|
|
||||||
func TestCantMergeConflict(t *testing.T) {
|
func TestCantMergeConflict(t *testing.T) {
|
||||||
onGiteaRun(t, func(t *testing.T, giteaURL *url.URL) {
|
onGiteaRun(t, func(t *testing.T, giteaURL *url.URL) {
|
||||||
defer prepareTestEnv(t)()
|
|
||||||
session := loginUser(t, "user1")
|
session := loginUser(t, "user1")
|
||||||
testRepoFork(t, session, "user2", "repo1", "user1", "repo1")
|
testRepoFork(t, session, "user2", "repo1", "user1", "repo1")
|
||||||
testEditFileToNewBranch(t, session, "user1", "repo1", "master", "conflict", "README.md", "Hello, World (Edited Once)\n")
|
testEditFileToNewBranch(t, session, "user1", "repo1", "master", "conflict", "README.md", "Hello, World (Edited Once)\n")
|
||||||
|
@ -258,7 +251,6 @@ func TestCantMergeConflict(t *testing.T) {
|
||||||
|
|
||||||
func TestCantMergeUnrelated(t *testing.T) {
|
func TestCantMergeUnrelated(t *testing.T) {
|
||||||
onGiteaRun(t, func(t *testing.T, giteaURL *url.URL) {
|
onGiteaRun(t, func(t *testing.T, giteaURL *url.URL) {
|
||||||
defer prepareTestEnv(t)()
|
|
||||||
session := loginUser(t, "user1")
|
session := loginUser(t, "user1")
|
||||||
testRepoFork(t, session, "user2", "repo1", "user1", "repo1")
|
testRepoFork(t, session, "user2", "repo1", "user1", "repo1")
|
||||||
testEditFileToNewBranch(t, session, "user1", "repo1", "master", "base", "README.md", "Hello, World (Edited Twice)\n")
|
testEditFileToNewBranch(t, session, "user1", "repo1", "master", "base", "README.md", "Hello, World (Edited Twice)\n")
|
||||||
|
|
|
@ -11,7 +11,6 @@ import (
|
||||||
"strings"
|
"strings"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"code.gitea.io/gitea/models"
|
|
||||||
api "code.gitea.io/gitea/modules/structs"
|
api "code.gitea.io/gitea/modules/structs"
|
||||||
|
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
|
@ -48,20 +47,20 @@ func TestPullCreate_CommitStatus(t *testing.T) {
|
||||||
|
|
||||||
commitID := path.Base(commitURL)
|
commitID := path.Base(commitURL)
|
||||||
|
|
||||||
statusList := []models.CommitStatusState{
|
statusList := []api.CommitStatusState{
|
||||||
models.CommitStatusPending,
|
api.CommitStatusPending,
|
||||||
models.CommitStatusError,
|
api.CommitStatusError,
|
||||||
models.CommitStatusFailure,
|
api.CommitStatusFailure,
|
||||||
models.CommitStatusWarning,
|
api.CommitStatusWarning,
|
||||||
models.CommitStatusSuccess,
|
api.CommitStatusSuccess,
|
||||||
}
|
}
|
||||||
|
|
||||||
statesIcons := map[models.CommitStatusState]string{
|
statesIcons := map[api.CommitStatusState]string{
|
||||||
models.CommitStatusPending: "circle icon yellow",
|
api.CommitStatusPending: "circle icon yellow",
|
||||||
models.CommitStatusSuccess: "check icon green",
|
api.CommitStatusSuccess: "check icon green",
|
||||||
models.CommitStatusError: "warning icon red",
|
api.CommitStatusError: "warning icon red",
|
||||||
models.CommitStatusFailure: "remove icon red",
|
api.CommitStatusFailure: "remove icon red",
|
||||||
models.CommitStatusWarning: "warning sign icon yellow",
|
api.CommitStatusWarning: "warning sign icon yellow",
|
||||||
}
|
}
|
||||||
|
|
||||||
// Update commit status, and check if icon is updated as well
|
// Update commit status, and check if icon is updated as well
|
||||||
|
|
|
@@ -20,7 +20,7 @@ func createNewRelease(t *testing.T, session *TestSession, repoURL, tag, title st
    resp := session.MakeRequest(t, req, http.StatusOK)
    htmlDoc := NewHTMLParser(t, resp.Body)

-    link, exists := htmlDoc.doc.Find("form").Attr("action")
+    link, exists := htmlDoc.doc.Find("form.ui.form").Attr("action")
    assert.True(t, exists, "The template has changed")

    postData := map[string]string{
@@ -122,10 +122,13 @@ func (a *Action) ShortActUserName() string {
    return base.EllipsisString(a.GetActUserName(), 20)
}

-// GetDisplayName gets the action's display name based on DEFAULT_SHOW_FULL_NAME
+// GetDisplayName gets the action's display name based on DEFAULT_SHOW_FULL_NAME, or falls back to the username if it is blank.
func (a *Action) GetDisplayName() string {
    if setting.UI.DefaultShowFullName {
-        return a.GetActFullName()
+        trimmedFullName := strings.TrimSpace(a.GetActFullName())
+        if len(trimmedFullName) > 0 {
+            return trimmedFullName
+        }
    }
    return a.ShortActUserName()
}
@@ -212,7 +215,7 @@ func (a *Action) getCommentLink(e Engine) string {
        return "#"
    }
    if a.Comment == nil && a.CommentID != 0 {
-        a.Comment, _ = GetCommentByID(a.CommentID)
+        a.Comment, _ = getCommentByID(e, a.CommentID)
    }
    if a.Comment != nil {
        return a.Comment.HTMLURL()
@@ -432,6 +435,8 @@ func GetFeeds(opts GetFeedsOptions) ([]*Action, error) {
        }

        cond = cond.And(builder.In("repo_id", repoIDs))
+    } else {
+        cond = cond.And(builder.In("repo_id", AccessibleRepoIDsQuery(opts.RequestingUserID)))
    }

    cond = cond.And(builder.Eq{"user_id": opts.RequestedUser.ID})
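The GetDisplayName change prefers the actor's full name only when it is non-blank after trimming, otherwise it falls back to the (possibly shortened) username. A tiny standalone sketch of that fallback rule; the function and flag names below are illustrative, not the models API.

```go
package main

import (
	"fmt"
	"strings"
)

// displayName mirrors the fallback above: prefer the full name when full
// names are enabled and non-blank, otherwise fall back to the username.
// showFullName stands in for setting.UI.DefaultShowFullName.
func displayName(showFullName bool, fullName, userName string) string {
	if showFullName {
		if trimmed := strings.TrimSpace(fullName); len(trimmed) > 0 {
			return trimmed
		}
	}
	return userName
}

func main() {
	fmt.Println(displayName(true, "   ", "lunny"))         // blank full name -> "lunny"
	fmt.Println(displayName(true, "Lunny Xiao", "lunny"))  // -> "Lunny Xiao"
	fmt.Println(displayName(false, "Lunny Xiao", "lunny")) // -> "lunny"
}
```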
@@ -79,7 +79,11 @@ func (a *Attachment) LinkedRepository() (*Repository, UnitType, error) {
            return nil, UnitTypeIssues, err
        }
        repo, err := GetRepositoryByID(iss.RepoID)
-        return repo, UnitTypeIssues, err
+        unitType := UnitTypeIssues
+        if iss.IsPull {
+            unitType = UnitTypePullRequests
+        }
+        return repo, unitType, err
    } else if a.ReleaseID != 0 {
        rel, err := GetReleaseByID(a.ReleaseID)
        if err != nil {
@@ -195,7 +199,7 @@ func GetAttachmentsByCommentID(commentID int64) ([]*Attachment, error) {

func getAttachmentsByCommentID(e Engine, commentID int64) ([]*Attachment, error) {
    attachments := make([]*Attachment, 0, 10)
-    return attachments, x.Where("comment_id=?", commentID).Find(&attachments)
+    return attachments, e.Where("comment_id=?", commentID).Find(&attachments)
}

// getAttachmentByReleaseIDFileName return a file based on the the following infos:
@@ -138,7 +138,7 @@ func TestLinkedRepository(t *testing.T) {
        expectedUnitType UnitType
    }{
        {"LinkedIssue", 1, &Repository{ID: 1}, UnitTypeIssues},
-        {"LinkedComment", 3, &Repository{ID: 1}, UnitTypeIssues},
+        {"LinkedComment", 3, &Repository{ID: 1}, UnitTypePullRequests},
        {"LinkedRelease", 9, &Repository{ID: 1}, UnitTypeReleases},
        {"Notlinked", 10, nil, -1},
    }
@ -19,52 +19,19 @@ import (
|
||||||
"xorm.io/xorm"
|
"xorm.io/xorm"
|
||||||
)
|
)
|
||||||
|
|
||||||
// CommitStatusState holds the state of a Status
|
|
||||||
// It can be "pending", "success", "error", "failure", and "warning"
|
|
||||||
type CommitStatusState string
|
|
||||||
|
|
||||||
// IsWorseThan returns true if this State is worse than the given State
|
|
||||||
func (css CommitStatusState) IsWorseThan(css2 CommitStatusState) bool {
|
|
||||||
switch css {
|
|
||||||
case CommitStatusError:
|
|
||||||
return true
|
|
||||||
case CommitStatusFailure:
|
|
||||||
return css2 != CommitStatusError
|
|
||||||
case CommitStatusWarning:
|
|
||||||
return css2 != CommitStatusError && css2 != CommitStatusFailure
|
|
||||||
case CommitStatusSuccess:
|
|
||||||
return css2 != CommitStatusError && css2 != CommitStatusFailure && css2 != CommitStatusWarning
|
|
||||||
default:
|
|
||||||
return css2 != CommitStatusError && css2 != CommitStatusFailure && css2 != CommitStatusWarning && css2 != CommitStatusSuccess
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const (
|
|
||||||
// CommitStatusPending is for when the Status is Pending
|
|
||||||
CommitStatusPending CommitStatusState = "pending"
|
|
||||||
// CommitStatusSuccess is for when the Status is Success
|
|
||||||
CommitStatusSuccess CommitStatusState = "success"
|
|
||||||
// CommitStatusError is for when the Status is Error
|
|
||||||
CommitStatusError CommitStatusState = "error"
|
|
||||||
// CommitStatusFailure is for when the Status is Failure
|
|
||||||
CommitStatusFailure CommitStatusState = "failure"
|
|
||||||
// CommitStatusWarning is for when the Status is Warning
|
|
||||||
CommitStatusWarning CommitStatusState = "warning"
|
|
||||||
)
|
|
||||||
|
|
||||||
// CommitStatus holds a single Status of a single Commit
|
// CommitStatus holds a single Status of a single Commit
|
||||||
type CommitStatus struct {
|
type CommitStatus struct {
|
||||||
ID int64 `xorm:"pk autoincr"`
|
ID int64 `xorm:"pk autoincr"`
|
||||||
Index int64 `xorm:"INDEX UNIQUE(repo_sha_index)"`
|
Index int64 `xorm:"INDEX UNIQUE(repo_sha_index)"`
|
||||||
RepoID int64 `xorm:"INDEX UNIQUE(repo_sha_index)"`
|
RepoID int64 `xorm:"INDEX UNIQUE(repo_sha_index)"`
|
||||||
Repo *Repository `xorm:"-"`
|
Repo *Repository `xorm:"-"`
|
||||||
State CommitStatusState `xorm:"VARCHAR(7) NOT NULL"`
|
State api.CommitStatusState `xorm:"VARCHAR(7) NOT NULL"`
|
||||||
SHA string `xorm:"VARCHAR(64) NOT NULL INDEX UNIQUE(repo_sha_index)"`
|
SHA string `xorm:"VARCHAR(64) NOT NULL INDEX UNIQUE(repo_sha_index)"`
|
||||||
TargetURL string `xorm:"TEXT"`
|
TargetURL string `xorm:"TEXT"`
|
||||||
Description string `xorm:"TEXT"`
|
Description string `xorm:"TEXT"`
|
||||||
ContextHash string `xorm:"char(40) index"`
|
ContextHash string `xorm:"char(40) index"`
|
||||||
Context string `xorm:"TEXT"`
|
Context string `xorm:"TEXT"`
|
||||||
Creator *User `xorm:"-"`
|
Creator *User `xorm:"-"`
|
||||||
CreatorID int64
|
CreatorID int64
|
||||||
|
|
||||||
CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
|
CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
|
||||||
|
@ -118,9 +85,9 @@ func (status *CommitStatus) APIFormat() *api.Status {
|
||||||
// CalcCommitStatus returns commit status state via some status, the commit statues should order by id desc
|
// CalcCommitStatus returns commit status state via some status, the commit statues should order by id desc
|
||||||
func CalcCommitStatus(statuses []*CommitStatus) *CommitStatus {
|
func CalcCommitStatus(statuses []*CommitStatus) *CommitStatus {
|
||||||
var lastStatus *CommitStatus
|
var lastStatus *CommitStatus
|
||||||
var state CommitStatusState
|
var state api.CommitStatusState
|
||||||
for _, status := range statuses {
|
for _, status := range statuses {
|
||||||
if status.State.IsWorseThan(state) {
|
if status.State.NoBetterThan(state) {
|
||||||
state = status.State
|
state = status.State
|
||||||
lastStatus = status
|
lastStatus = status
|
||||||
}
|
}
|
||||||
|
|
|
@@ -7,6 +7,7 @@ package models

 import (
 	"testing"

+	"code.gitea.io/gitea/modules/structs"
 	"github.com/stretchr/testify/assert"
 )

@@ -23,22 +24,22 @@ func TestGetCommitStatuses(t *testing.T) {
 	assert.Len(t, statuses, 5)

 	assert.Equal(t, "ci/awesomeness", statuses[0].Context)
-	assert.Equal(t, CommitStatusPending, statuses[0].State)
+	assert.Equal(t, structs.CommitStatusPending, statuses[0].State)
 	assert.Equal(t, "https://try.gitea.io/api/v1/repos/user2/repo1/statuses/1234123412341234123412341234123412341234", statuses[0].APIURL())

 	assert.Equal(t, "cov/awesomeness", statuses[1].Context)
-	assert.Equal(t, CommitStatusWarning, statuses[1].State)
+	assert.Equal(t, structs.CommitStatusWarning, statuses[1].State)
 	assert.Equal(t, "https://try.gitea.io/api/v1/repos/user2/repo1/statuses/1234123412341234123412341234123412341234", statuses[1].APIURL())

 	assert.Equal(t, "cov/awesomeness", statuses[2].Context)
-	assert.Equal(t, CommitStatusSuccess, statuses[2].State)
+	assert.Equal(t, structs.CommitStatusSuccess, statuses[2].State)
 	assert.Equal(t, "https://try.gitea.io/api/v1/repos/user2/repo1/statuses/1234123412341234123412341234123412341234", statuses[2].APIURL())

 	assert.Equal(t, "ci/awesomeness", statuses[3].Context)
-	assert.Equal(t, CommitStatusFailure, statuses[3].State)
+	assert.Equal(t, structs.CommitStatusFailure, statuses[3].State)
 	assert.Equal(t, "https://try.gitea.io/api/v1/repos/user2/repo1/statuses/1234123412341234123412341234123412341234", statuses[3].APIURL())

 	assert.Equal(t, "deploy/awesomeness", statuses[4].Context)
-	assert.Equal(t, CommitStatusError, statuses[4].State)
+	assert.Equal(t, structs.CommitStatusError, statuses[4].State)
 	assert.Equal(t, "https://try.gitea.io/api/v1/repos/user2/repo1/statuses/1234123412341234123412341234123412341234", statuses[4].APIURL())
 }
@@ -7,6 +7,7 @@ package models

 import (
 	"fmt"
+	"strings"

 	"code.gitea.io/gitea/modules/git"
 )

@@ -56,6 +57,21 @@ func (err ErrNamePatternNotAllowed) Error() string {
 	return fmt.Sprintf("name pattern is not allowed [pattern: %s]", err.Pattern)
 }

+// ErrNameCharsNotAllowed represents a "character not allowed in name" error.
+type ErrNameCharsNotAllowed struct {
+	Name string
+}
+
+// IsErrNameCharsNotAllowed checks if an error is an ErrNameCharsNotAllowed.
+func IsErrNameCharsNotAllowed(err error) bool {
+	_, ok := err.(ErrNameCharsNotAllowed)
+	return ok
+}
+
+func (err ErrNameCharsNotAllowed) Error() string {
+	return fmt.Sprintf("User name is invalid [%s]: must be valid alpha or numeric or dash(-_) or dot characters", err.Name)
+}
+
 // ErrSSHDisabled represents an "SSH disabled" error.
 type ErrSSHDisabled struct {
 }
@@ -1355,6 +1371,53 @@ func (err ErrMergePushOutOfDate) Error() string {
 	return fmt.Sprintf("Merge PushOutOfDate Error: %v: %s\n%s", err.Err, err.StdErr, err.StdOut)
 }

+// ErrPushRejected represents an error if merging fails due to rejection from a hook
+type ErrPushRejected struct {
+	Style   MergeStyle
+	Message string
+	StdOut  string
+	StdErr  string
+	Err     error
+}
+
+// IsErrPushRejected checks if an error is a ErrPushRejected.
+func IsErrPushRejected(err error) bool {
+	_, ok := err.(ErrPushRejected)
+	return ok
+}
+
+func (err ErrPushRejected) Error() string {
+	return fmt.Sprintf("Merge PushRejected Error: %v: %s\n%s", err.Err, err.StdErr, err.StdOut)
+}
+
+// GenerateMessage generates the remote message from the stderr
+func (err *ErrPushRejected) GenerateMessage() {
+	messageBuilder := &strings.Builder{}
+	i := strings.Index(err.StdErr, "remote: ")
+	if i < 0 {
+		err.Message = ""
+		return
+	}
+	for {
+		if len(err.StdErr) <= i+8 {
+			break
+		}
+		if err.StdErr[i:i+8] != "remote: " {
+			break
+		}
+		i += 8
+		nl := strings.IndexByte(err.StdErr[i:], '\n')
+		if nl > 0 {
+			messageBuilder.WriteString(err.StdErr[i : i+nl+1])
+			i = i + nl + 1
+		} else {
+			messageBuilder.WriteString(err.StdErr[i:])
+			i = len(err.StdErr)
+		}
+	}
+	err.Message = strings.TrimSpace(messageBuilder.String())
+}
+
 // ErrRebaseConflicts represents an error if rebase fails with a conflict
 type ErrRebaseConflicts struct {
 	Style MergeStyle
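GenerateMessage extracts the consecutive "remote: " lines that a pre-receive hook prints to stderr, so only the hook's own message is surfaced to the user. A simplified, self-contained sketch of the same idea (it collects every "remote: " line rather than only the first consecutive run, and the function name is an assumption):

```go
package main

import (
	"fmt"
	"strings"
)

// stripRemotePrefix mimics what GenerateMessage above is after: keep only the
// hook output lines and drop the "remote: " prefix git adds to them.
func stripRemotePrefix(stderr string) string {
	var b strings.Builder
	for _, line := range strings.Split(stderr, "\n") {
		if strings.HasPrefix(line, "remote: ") {
			b.WriteString(strings.TrimPrefix(line, "remote: "))
			b.WriteString("\n")
		}
	}
	return strings.TrimSpace(b.String())
}

func main() {
	stderr := "remote: commits must be signed\nremote: rejecting push\n ! [remote rejected] main -> main (pre-receive hook declined)"
	fmt.Println(stripRemotePrefix(stderr))
	// commits must be signed
	// rejecting push
}
```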
@@ -369,6 +369,7 @@ type CommitVerification struct {
 	CommittingUser *User
 	SigningEmail   string
 	SigningKey     *GPGKey
+	TrustStatus    string
 }

 // SignCommit represents a commit with validation of signature.

@@ -754,18 +755,54 @@ func verifyWithGPGSettings(gpgSettings *git.GPGSettings, sig *packet.Signature,
 }

 // ParseCommitsWithSignature checks if signaute of commits are corresponding to users gpg keys.
-func ParseCommitsWithSignature(oldCommits *list.List) *list.List {
+func ParseCommitsWithSignature(oldCommits *list.List, repository *Repository) *list.List {
 	var (
 		newCommits = list.New()
 		e          = oldCommits.Front()
 	)
+	memberMap := map[int64]bool{}

 	for e != nil {
 		c := e.Value.(UserCommit)
-		newCommits.PushBack(SignCommit{
+		signCommit := SignCommit{
 			UserCommit:   &c,
 			Verification: ParseCommitWithSignature(c.Commit),
-		})
+		}
+
+		_ = CalculateTrustStatus(signCommit.Verification, repository, &memberMap)
+
+		newCommits.PushBack(signCommit)
 		e = e.Next()
 	}
 	return newCommits
 }

+// CalculateTrustStatus will calculate the TrustStatus for a commit verification within a repository
+func CalculateTrustStatus(verification *CommitVerification, repository *Repository, memberMap *map[int64]bool) (err error) {
+	if verification.Verified {
+		verification.TrustStatus = "trusted"
+		if verification.SigningUser.ID != 0 {
+			var isMember bool
+			if memberMap != nil {
+				var has bool
+				isMember, has = (*memberMap)[verification.SigningUser.ID]
+				if !has {
+					isMember, err = repository.IsOwnerMemberCollaborator(verification.SigningUser.ID)
+					(*memberMap)[verification.SigningUser.ID] = isMember
+				}
+			} else {
+				isMember, err = repository.IsOwnerMemberCollaborator(verification.SigningUser.ID)
+			}
+
+			if !isMember {
+				verification.TrustStatus = "untrusted"
+				if verification.CommittingUser.ID != verification.SigningUser.ID {
+					// The committing user and the signing user are not the same and are not the default key
+					// This should be marked as questionable unless the signing user is a collaborator/team member etc.
+					verification.TrustStatus = "unmatched"
+				}
+			}
+		}
+	}
+	return
+}
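CalculateTrustStatus takes an optional memberMap so that, while walking a commit list, each signer's repository membership is resolved against the database at most once. A self-contained sketch of that caching pattern in isolation; every name here is an illustrative assumption, with Repository.IsOwnerMemberCollaborator replaced by a plain lookup function:

```go
package main

import "fmt"

type lookupFn func(userID int64) (bool, error)

// cachedIsMember resolves a user's membership once and reuses the answer for
// later commits signed by the same user, mirroring the memberMap usage above.
func cachedIsMember(cache map[int64]bool, userID int64, lookup lookupFn) (bool, error) {
	if isMember, ok := cache[userID]; ok {
		return isMember, nil
	}
	isMember, err := lookup(userID)
	if err != nil {
		return false, err
	}
	cache[userID] = isMember
	return isMember, nil
}

func main() {
	queries := 0
	lookup := func(userID int64) (bool, error) { queries++; return userID == 2, nil }
	cache := map[int64]bool{}
	for _, signer := range []int64{2, 2, 3, 2} {
		ok, _ := cachedIsMember(cache, signer, lookup)
		fmt.Println(signer, ok)
	}
	fmt.Println("database lookups:", queries) // 2, not 4
}
```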
@@ -381,6 +381,7 @@ func (issue *Issue) apiFormat(e Engine) *api.Issue {
 	apiIssue := &api.Issue{
 		ID: issue.ID,
 		URL: issue.APIURL(),
+		HTMLURL: issue.HTMLURL(),
 		Index: issue.Index,
 		Poster: issue.Poster.APIFormat(),
 		Title: issue.Title,

@@ -402,11 +403,12 @@ func (issue *Issue) apiFormat(e Engine) *api.Issue {
 		apiIssue.Closed = issue.ClosedUnix.AsTimePtr()
 	}

+	issue.loadMilestone(e)
 	if issue.Milestone != nil {
 		apiIssue.Milestone = issue.Milestone.APIFormat()
 	}
-	issue.loadAssignees(e)
+
+	issue.loadAssignees(e)
 	if len(issue.Assignees) > 0 {
 		for _, assignee := range issue.Assignees {
 			apiIssue.Assignees = append(apiIssue.Assignees, assignee.APIFormat())

@@ -436,7 +438,7 @@ func (issue *Issue) HashTag() string {

 // IsPoster returns true if given user by ID is the poster.
 func (issue *Issue) IsPoster(uid int64) bool {
-	return issue.PosterID == uid
+	return issue.OriginalAuthorID == 0 && issue.PosterID == uid
 }

 func (issue *Issue) hasLabel(e Engine, labelID int64) bool {

@@ -671,6 +673,10 @@ func (issue *Issue) changeStatus(e *xorm.Session, doer *User, isClosed bool) (*C
 		return nil, err
 	}

+	if err := issue.updateClosedNum(e); err != nil {
+		return nil, err
+	}
+
 	// New action comment
 	cmtType := CommentTypeClose
 	if !issue.IsClosed {

@@ -1334,6 +1340,36 @@ type IssueStatsOptions struct {

 // GetIssueStats returns issue statistic information by given conditions.
 func GetIssueStats(opts *IssueStatsOptions) (*IssueStats, error) {
+	if len(opts.IssueIDs) <= maxQueryParameters {
+		return getIssueStatsChunk(opts, opts.IssueIDs)
+	}
+
+	// If too long a list of IDs is provided, we get the statistics in
+	// smaller chunks and get accumulates. Note: this could potentially
+	// get us invalid results. The alternative is to insert the list of
+	// ids in a temporary table and join from them.
+	accum := &IssueStats{}
+	for i := 0; i < len(opts.IssueIDs); {
+		chunk := i + maxQueryParameters
+		if chunk > len(opts.IssueIDs) {
+			chunk = len(opts.IssueIDs)
+		}
+		stats, err := getIssueStatsChunk(opts, opts.IssueIDs[i:chunk])
+		if err != nil {
+			return nil, err
+		}
+		accum.OpenCount += stats.OpenCount
+		accum.ClosedCount += stats.ClosedCount
+		accum.YourRepositoriesCount += stats.YourRepositoriesCount
+		accum.AssignCount += stats.AssignCount
+		accum.CreateCount += stats.CreateCount
+		accum.OpenCount += stats.MentionCount
+		i = chunk
+	}
+	return accum, nil
+}
+
+func getIssueStatsChunk(opts *IssueStatsOptions, issueIDs []int64) (*IssueStats, error) {
 	stats := &IssueStats{}

 	countSession := func(opts *IssueStatsOptions) *xorm.Session {
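GetIssueStats now splits very long ID lists into slices of at most maxQueryParameters and accumulates the partial results, so the generated SQL never exceeds the database's parameter limit. A self-contained sketch of that chunking loop in isolation (the helper name is an assumption):

```go
package main

import "fmt"

const maxQueryParameters = 300 // mirrors the constant introduced in models.go below

// forEachChunk walks ids in slices of at most size elements, the same shape of
// loop GetIssueStats uses above.
func forEachChunk(ids []int64, size int, fn func(chunk []int64) error) error {
	for i := 0; i < len(ids); {
		end := i + size
		if end > len(ids) {
			end = len(ids)
		}
		if err := fn(ids[i:end]); err != nil {
			return err
		}
		i = end
	}
	return nil
}

func main() {
	ids := make([]int64, 650)
	_ = forEachChunk(ids, maxQueryParameters, func(chunk []int64) error {
		fmt.Println(len(chunk)) // 300, 300, 50
		return nil
	})
}
```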
@@ -749,8 +749,12 @@ func CreateRefComment(doer *User, repo *Repository, issue *Issue, content, commi

 // GetCommentByID returns the comment by given ID.
 func GetCommentByID(id int64) (*Comment, error) {
+	return getCommentByID(x, id)
+}
+
+func getCommentByID(e Engine, id int64) (*Comment, error) {
 	c := new(Comment)
-	has, err := x.ID(id).Get(c)
+	has, err := e.ID(id).Get(c)
 	if err != nil {
 		return nil, err
 	} else if !has {

@@ -376,6 +376,11 @@ func (comments CommentList) loadDependentIssues(e Engine) error {
 	for _, comment := range comments {
 		if comment.DependentIssue == nil {
 			comment.DependentIssue = issues[comment.DependentIssueID]
+			if comment.DependentIssue != nil {
+				if err := comment.DependentIssue.loadRepo(e); err != nil {
+					return err
+				}
+			}
 		}
 	}
 	return nil
@@ -6,6 +6,7 @@ package models

 import (
 	"fmt"
+	"strings"

 	"code.gitea.io/gitea/modules/setting"
 	api "code.gitea.io/gitea/modules/structs"

@@ -95,6 +96,8 @@ func NewMilestone(m *Milestone) (err error) {
 		return err
 	}

+	m.Name = strings.TrimSpace(m.Name)
+
 	if _, err = sess.Insert(m); err != nil {
 		return err
 	}

@@ -268,6 +271,7 @@ func GetMilestones(repoID int64, page int, isClosed bool, sortType string) (Mile
 }

 func updateMilestone(e Engine, m *Milestone) error {
+	m.Name = strings.TrimSpace(m.Name)
 	_, err := e.ID(m.ID).AllCols().
 		SetExpr("num_issues", builder.Select("count(*)").From("issue").Where(
 			builder.Eq{"milestone_id": m.ID},

@@ -283,12 +287,33 @@ func updateMilestone(e Engine, m *Milestone) error {
 }

 // UpdateMilestone updates information of given milestone.
-func UpdateMilestone(m *Milestone) error {
+func UpdateMilestone(m *Milestone, oldIsClosed bool) error {
-	if err := updateMilestone(x, m); err != nil {
+	sess := x.NewSession()
+	defer sess.Close()
+	if err := sess.Begin(); err != nil {
 		return err
 	}

-	return updateMilestoneCompleteness(x, m.ID)
+	if m.IsClosed && !oldIsClosed {
+		m.ClosedDateUnix = timeutil.TimeStampNow()
+	}
+
+	if err := updateMilestone(sess, m); err != nil {
+		return err
+	}
+
+	if err := updateMilestoneCompleteness(sess, m.ID); err != nil {
+		return err
+	}
+
+	// if IsClosed changed, update milestone numbers of repository
+	if oldIsClosed != m.IsClosed {
+		if err := updateRepoMilestoneNum(sess, m.RepoID); err != nil {
+			return err
+		}
+	}
+
+	return sess.Commit()
 }

 func updateMilestoneCompleteness(e Engine, milestoneID int64) error {

@@ -158,10 +158,11 @@ func TestUpdateMilestone(t *testing.T) {
 	assert.NoError(t, PrepareTestDatabase())

 	milestone := AssertExistsAndLoadBean(t, &Milestone{ID: 1}).(*Milestone)
-	milestone.Name = "newMilestoneName"
+	milestone.Name = " newMilestoneName "
 	milestone.Content = "newMilestoneContent"
-	assert.NoError(t, UpdateMilestone(milestone))
+	assert.NoError(t, UpdateMilestone(milestone, milestone.IsClosed))
-	AssertExistsAndLoadBean(t, milestone)
+	milestone = AssertExistsAndLoadBean(t, &Milestone{ID: 1}).(*Milestone)
+	assert.EqualValues(t, "newMilestoneName", milestone.Name)
 	CheckConsistencyFor(t, &Milestone{})
 }
@@ -64,14 +64,18 @@ func getIssueWatch(e Engine, userID, issueID int64) (iw *IssueWatch, exists bool
 	return
 }

-// GetIssueWatchersIDs returns IDs of subscribers to a given issue id
+// GetIssueWatchersIDs returns IDs of subscribers or explicit unsubscribers to a given issue id
 // but avoids joining with `user` for performance reasons
 // User permissions must be verified elsewhere if required
-func GetIssueWatchersIDs(issueID int64) ([]int64, error) {
+func GetIssueWatchersIDs(issueID int64, watching bool) ([]int64, error) {
+	return getIssueWatchersIDs(x, issueID, watching)
+}
+
+func getIssueWatchersIDs(e Engine, issueID int64, watching bool) ([]int64, error) {
 	ids := make([]int64, 0, 64)
-	return ids, x.Table("issue_watch").
+	return ids, e.Table("issue_watch").
 		Where("issue_id=?", issueID).
-		And("is_watching = ?", true).
+		And("is_watching = ?", watching).
 		Select("user_id").
 		Find(&ids)
 }
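With the new watching flag, callers can fetch explicit subscribers and explicit unsubscribers separately, which is useful when notification code needs to exclude users who opted out. A hedged sketch of how a caller might combine the two sets — only GetIssueWatchersIDs comes from the diff above; the surrounding helper is an assumption and would live inside the models package:

```go
// issueRecipientSets is a hypothetical helper inside the models package.
func issueRecipientSets(issueID int64) (subscribers, unsubscribers []int64, err error) {
	// users who explicitly subscribed to the issue
	if subscribers, err = GetIssueWatchersIDs(issueID, true); err != nil {
		return nil, nil, err
	}
	// users who explicitly unsubscribed and must be excluded from notifications
	if unsubscribers, err = GetIssueWatchersIDs(issueID, false); err != nil {
		return nil, nil, err
	}
	return subscribers, unsubscribers, nil
}
```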
@@ -12,7 +12,6 @@ import (
 	"fmt"
 	"net/smtp"
 	"net/textproto"
-	"regexp"
 	"strings"

 	"code.gitea.io/gitea/modules/auth/ldap"

@@ -455,13 +454,9 @@ func composeFullName(firstname, surname, username string) string {
 	}
 }

-var (
-	alphaDashDotPattern = regexp.MustCompile(`[^\w-\.]`)
-)
-
 // LoginViaLDAP queries if login/password is valid against the LDAP directory pool,
 // and create a local user if success when enabled.
-func LoginViaLDAP(user *User, login, password string, source *LoginSource, autoRegister bool) (*User, error) {
+func LoginViaLDAP(user *User, login, password string, source *LoginSource) (*User, error) {
 	sr := source.Cfg.(*LDAPConfig).SearchEntry(login, password, source.Type == LoginDLDAP)
 	if sr == nil {
 		// User not in LDAP, do nothing

@@ -473,17 +468,25 @@ func LoginViaLDAP(user *User, login, password string, source *LoginSource, autoR
 	// Update User admin flag if exist
 	if isExist, err := IsUserExist(0, sr.Username); err != nil {
 		return nil, err
-	} else if isExist &&
-		!user.ProhibitLogin && len(source.LDAP().AdminFilter) > 0 && user.IsAdmin != sr.IsAdmin {
-		// Change existing admin flag only if AdminFilter option is set
-		user.IsAdmin = sr.IsAdmin
-		err = UpdateUserCols(user, "is_admin")
-		if err != nil {
-			return nil, err
+	} else if isExist {
+		if user == nil {
+			user, err = GetUserByName(sr.Username)
+			if err != nil {
+				return nil, err
+			}
+		}
+		if user != nil &&
+			!user.ProhibitLogin && len(source.LDAP().AdminFilter) > 0 && user.IsAdmin != sr.IsAdmin {
+			// Change existing admin flag only if AdminFilter option is set
+			user.IsAdmin = sr.IsAdmin
+			err = UpdateUserCols(user, "is_admin")
+			if err != nil {
+				return nil, err
+			}
 		}
 	}

-	if !autoRegister {
+	if user != nil {
 		if isAttributeSSHPublicKeySet && synchronizeLdapSSHPublicKeys(user, source, sr.SSHPublicKey) {
 			return user, RewriteAllPublicKeys()
 		}

@@ -495,10 +498,6 @@ func LoginViaLDAP(user *User, login, password string, source *LoginSource, autoR
 	if len(sr.Username) == 0 {
 		sr.Username = login
 	}
-	// Validate username make sure it satisfies requirement.
-	if alphaDashDotPattern.MatchString(sr.Username) {
-		return nil, fmt.Errorf("Invalid pattern for attribute 'username' [%s]: must be valid alpha or numeric or dash(-_) or dot characters", sr.Username)
-	}

 	if len(sr.Mail) == 0 {
 		sr.Mail = fmt.Sprintf("%s@localhost", sr.Username)

@@ -594,7 +593,7 @@ func SMTPAuth(a smtp.Auth, cfg *SMTPConfig) error {

 // LoginViaSMTP queries if login/password is valid against the SMTP,
 // and create a local user if success when enabled.
-func LoginViaSMTP(user *User, login, password string, sourceID int64, cfg *SMTPConfig, autoRegister bool) (*User, error) {
+func LoginViaSMTP(user *User, login, password string, sourceID int64, cfg *SMTPConfig) (*User, error) {
 	// Verify allowed domains.
 	if len(cfg.AllowedDomains) > 0 {
 		idx := strings.Index(login, "@")

@@ -625,7 +624,7 @@ func LoginViaSMTP(user *User, login, password string, sourceID int64, cfg *SMTPC
 		return nil, err
 	}

-	if !autoRegister {
+	if user != nil {
 		return user, nil
 	}

@@ -657,33 +656,41 @@ func LoginViaSMTP(user *User, login, password string, sourceID int64, cfg *SMTPC

 // LoginViaPAM queries if login/password is valid against the PAM,
 // and create a local user if success when enabled.
-func LoginViaPAM(user *User, login, password string, sourceID int64, cfg *PAMConfig, autoRegister bool) (*User, error) {
+func LoginViaPAM(user *User, login, password string, sourceID int64, cfg *PAMConfig) (*User, error) {
-	if err := pam.Auth(cfg.ServiceName, login, password); err != nil {
+	pamLogin, err := pam.Auth(cfg.ServiceName, login, password)
+	if err != nil {
 		if strings.Contains(err.Error(), "Authentication failure") {
 			return nil, ErrUserNotExist{0, login, 0}
 		}
 		return nil, err
 	}

-	if !autoRegister {
+	if user != nil {
 		return user, nil
 	}

+	// Allow PAM sources with `@` in their name, like from Active Directory
+	username := pamLogin
+	idx := strings.Index(pamLogin, "@")
+	if idx > -1 {
+		username = pamLogin[:idx]
+	}
+
 	user = &User{
-		LowerName: strings.ToLower(login),
+		LowerName: strings.ToLower(username),
-		Name: login,
+		Name: username,
-		Email: login,
+		Email: pamLogin,
 		Passwd: password,
 		LoginType: LoginPAM,
 		LoginSource: sourceID,
-		LoginName: login,
+		LoginName: login, // This is what the user typed in
 		IsActive: true,
 	}
 	return user, CreateUser(user)
 }

 // ExternalUserLogin attempts a login using external source types.
-func ExternalUserLogin(user *User, login, password string, source *LoginSource, autoRegister bool) (*User, error) {
+func ExternalUserLogin(user *User, login, password string, source *LoginSource) (*User, error) {
 	if !source.IsActived {
 		return nil, ErrLoginSourceNotActived
 	}

@@ -691,11 +698,11 @@ func ExternalUserLogin(user *User, login, password string, source *LoginSource,
 	var err error
 	switch source.Type {
 	case LoginLDAP, LoginDLDAP:
-		user, err = LoginViaLDAP(user, login, password, source, autoRegister)
+		user, err = LoginViaLDAP(user, login, password, source)
 	case LoginSMTP:
-		user, err = LoginViaSMTP(user, login, password, source.ID, source.Cfg.(*SMTPConfig), autoRegister)
+		user, err = LoginViaSMTP(user, login, password, source.ID, source.Cfg.(*SMTPConfig))
 	case LoginPAM:
-		user, err = LoginViaPAM(user, login, password, source.ID, source.Cfg.(*PAMConfig), autoRegister)
+		user, err = LoginViaPAM(user, login, password, source.ID, source.Cfg.(*PAMConfig))
 	default:
 		return nil, ErrUnsupportedLoginType
 	}

@@ -775,7 +782,7 @@ func UserSignIn(username, password string) (*User, error) {
 				return nil, ErrLoginSourceNotExist{user.LoginSource}
 			}

-			return ExternalUserLogin(user, user.LoginName, password, &source, false)
+			return ExternalUserLogin(user, user.LoginName, password, &source)
 		}
 	}

@@ -789,7 +796,7 @@ func UserSignIn(username, password string) (*User, error) {
 			// don't try to authenticate against OAuth2 and SSPI sources here
 			continue
 		}
-		authUser, err := ExternalUserLogin(nil, username, password, source, true)
+		authUser, err := ExternalUserLogin(nil, username, password, source)
 		if err == nil {
 			return authUser, nil
 		}
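LoginViaPAM now uses the login that PAM itself reports back and, when it contains an "@" (as with Active Directory realms), keeps only the part before it as the account name while storing the full value as the e-mail. A small stand-alone sketch of that split (the function name is an assumption):

```go
package main

import (
	"fmt"
	"strings"
)

// splitPAMLogin mirrors the new LoginViaPAM behaviour shown above: the part
// before '@' becomes the account name, the full PAM login stays as the e-mail.
func splitPAMLogin(pamLogin string) (username, email string) {
	username = pamLogin
	if idx := strings.Index(pamLogin, "@"); idx > -1 {
		username = pamLogin[:idx]
	}
	return username, pamLogin
}

func main() {
	u, e := splitPAMLogin("jdoe@EXAMPLE.ORG")
	fmt.Println(u, e) // jdoe jdoe@EXAMPLE.ORG
}
```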
@@ -26,23 +26,38 @@ func deleteOrphanedAttachments(x *xorm.Engine) error {
 	sess := x.NewSession()
 	defer sess.Close()

-	err := sess.BufferSize(setting.Database.IterateBufferSize).
-		Where("`issue_id` = 0 and (`release_id` = 0 or `release_id` not in (select `id` from `release`))").Cols("uuid").
-		Iterate(new(Attachment),
-			func(idx int, bean interface{}) error {
-				attachment := bean.(*Attachment)
-
-				if err := os.RemoveAll(models.AttachmentLocalPath(attachment.UUID)); err != nil {
-					return err
-				}
-
-				_, err := sess.ID(attachment.ID).NoAutoCondition().Delete(attachment)
-				return err
-			})
-
-	if err != nil {
-		return err
+	var limit = setting.Database.IterateBufferSize
+	if limit <= 0 {
+		limit = 50
 	}

-	return sess.Commit()
+	for {
+		attachements := make([]Attachment, 0, limit)
+		if err := sess.Where("`issue_id` = 0 and (`release_id` = 0 or `release_id` not in (select `id` from `release`))").
+			Cols("id, uuid").Limit(limit).
+			Asc("id").
+			Find(&attachements); err != nil {
+			return err
+		}
+		if len(attachements) == 0 {
+			return nil
+		}
+
+		var ids = make([]int64, 0, limit)
+		for _, attachment := range attachements {
+			ids = append(ids, attachment.ID)
+		}
+		if _, err := sess.In("id", ids).Delete(new(Attachment)); err != nil {
+			return err
+		}
+
+		for _, attachment := range attachements {
+			if err := os.RemoveAll(models.AttachmentLocalPath(attachment.UUID)); err != nil {
+				return err
+			}
+		}
+		if len(attachements) < limit {
+			return nil
+		}
+	}
 }
@@ -46,6 +46,12 @@ type Engine interface {
 	Asc(colNames ...string) *xorm.Session
 }

+const (
+	// When queries are broken down in parts because of the number
+	// of parameters, attempt to break by this amount
+	maxQueryParameters = 300
+)
+
 var (
 	x      *xorm.Engine
 	tables []interface{}
@@ -399,7 +399,7 @@ func TestIncludesAllRepositoriesTeams(t *testing.T) {

 	// Create org.
 	org := &User{
-		Name: "All repo",
+		Name: "All_repo",
 		IsActive: true,
 		Type: UserTypeOrganization,
 		Visibility: structs.VisibleTypePublic,
369	models/pull.go
@@ -17,6 +23 @@ import (
 	"code.gitea.io/gitea/modules/timeutil"
 )

+// ErrHeadRepoMissed represents an error to report pull's head repository missed
+type ErrHeadRepoMissed struct {
+	PullID     int64
+	HeadRepoID int64
+}
+
+// Error implements the interface
+func (err ErrHeadRepoMissed) Error() string {
+	return fmt.Sprintf("Head repository [%d] of pull id [%d] missed", err.HeadRepoID, err.PullID)
+}
+
+// IsErrHeadRepoMissed returns true if head repository missed
+func IsErrHeadRepoMissed(err error) bool {
+	_, ok := err.(ErrHeadRepoMissed)
+	return ok
+}
+
 // PullRequestType defines pull request type
 type PullRequestType int
@@ -35,6 +52,7 @@ const (
 	PullRequestStatusChecking
 	PullRequestStatusMergeable
 	PullRequestStatusManuallyMerged
+	PullRequestStatusError
 )

 // PullRequest represents relation between pull request and repositories.
@@ -62,12 +80,21 @@ type PullRequest struct {
 	MergerID   int64              `xorm:"INDEX"`
 	Merger     *User              `xorm:"-"`
 	MergedUnix timeutil.TimeStamp `xorm:"updated INDEX"`

+	isHeadRepoLoaded bool `xorm:"-"`
 }

 // MustHeadUserName returns the HeadRepo's username if failed return blank
 func (pr *PullRequest) MustHeadUserName() string {
 	if err := pr.LoadHeadRepo(); err != nil {
-		log.Error("LoadHeadRepo: %v", err)
+		if !IsErrRepoNotExist(err) {
+			log.Error("LoadHeadRepo: %v", err)
+		} else {
+			log.Warn("LoadHeadRepo %d but repository does not exist: %v", pr.HeadRepoID, err)
+		}
+		return ""
+	}
+	if pr.HeadRepo == nil {
 		return ""
 	}
 	return pr.HeadRepo.MustOwnerName()
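The new isHeadRepoLoaded flag lets a nil HeadRepo be cached as a valid answer (the head repository may have been deleted), so repeated calls do not re-query the database. A minimal, self-contained sketch of that load-once pattern under assumed names:

```go
package main

import "fmt"

type repo struct{ ID int64 }

// pullRequest mimics only the caching behaviour: once loaded is set, the
// stored result is reused even when it is nil. Names are illustrative.
type pullRequest struct {
	headRepo *repo
	loaded   bool
}

func (pr *pullRequest) loadHeadRepo(fetch func() *repo) *repo {
	if !pr.loaded {
		pr.headRepo = fetch() // may legitimately return nil for a deleted repo
		pr.loaded = true
	}
	return pr.headRepo
}

func main() {
	calls := 0
	fetch := func() *repo { calls++; return nil }
	pr := &pullRequest{}
	pr.loadHeadRepo(fetch)
	pr.loadHeadRepo(fetch)
	fmt.Println("fetches:", calls) // 1
}
```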
@@ -93,38 +120,57 @@ func (pr *PullRequest) LoadAttributes() error {
 	return pr.loadAttributes(x)
 }

-// LoadBaseRepo loads pull request base repository from database
+func (pr *PullRequest) loadHeadRepo(e Engine) error {
-func (pr *PullRequest) LoadBaseRepo() error {
+	if !pr.isHeadRepoLoaded && pr.HeadRepo == nil && pr.HeadRepoID > 0 {
-	if pr.BaseRepo == nil {
+		if pr.HeadRepoID == pr.BaseRepoID {
-		if pr.HeadRepoID == pr.BaseRepoID && pr.HeadRepo != nil {
+			if pr.BaseRepo != nil {
-			pr.BaseRepo = pr.HeadRepo
+				pr.HeadRepo = pr.BaseRepo
 				return nil
+			} else if pr.Issue != nil && pr.Issue.Repo != nil {
+				pr.HeadRepo = pr.Issue.Repo
+				return nil
+			}
 		}
-		var repo Repository
-		if has, err := x.ID(pr.BaseRepoID).Get(&repo); err != nil {
+		var err error
-			return err
+		pr.HeadRepo, err = getRepositoryByID(e, pr.HeadRepoID)
-		} else if !has {
+		if err != nil && !IsErrRepoNotExist(err) { // Head repo maybe deleted, but it should still work
-			return ErrRepoNotExist{ID: pr.BaseRepoID}
+			return fmt.Errorf("getRepositoryByID(head): %v", err)
 		}
-		pr.BaseRepo = &repo
+		pr.isHeadRepoLoaded = true
 	}
 	return nil
 }

-// LoadHeadRepo loads pull request head repository from database
+// LoadHeadRepo loads the head repository
 func (pr *PullRequest) LoadHeadRepo() error {
-	if pr.HeadRepo == nil {
+	return pr.loadHeadRepo(x)
-		if pr.HeadRepoID == pr.BaseRepoID && pr.BaseRepo != nil {
+}
-			pr.HeadRepo = pr.BaseRepo
-			return nil
+// LoadBaseRepo loads the target repository
-		}
+func (pr *PullRequest) LoadBaseRepo() error {
-		var repo Repository
+	return pr.loadBaseRepo(x)
-		if has, err := x.ID(pr.HeadRepoID).Get(&repo); err != nil {
+}
-			return err
-		} else if !has {
+func (pr *PullRequest) loadBaseRepo(e Engine) (err error) {
-			return ErrRepoNotExist{ID: pr.HeadRepoID}
+	if pr.BaseRepo != nil {
-		}
+		return nil
-		pr.HeadRepo = &repo
+	}

+	if pr.HeadRepoID == pr.BaseRepoID && pr.HeadRepo != nil {
+		pr.BaseRepo = pr.HeadRepo
+		return nil
+	}
+
+	if pr.Issue != nil && pr.Issue.Repo != nil {
+		pr.BaseRepo = pr.Issue.Repo
+		return nil
+	}
+
+	pr.BaseRepo, err = getRepositoryByID(e, pr.BaseRepoID)
+	if err != nil {
+		return fmt.Errorf("GetRepositoryByID(base): %v", err)
 	}
 	return nil
 }
@@ -175,7 +221,16 @@ func (pr *PullRequest) GetDefaultMergeMessage() string {
 			return ""
 		}
 	}
-	return fmt.Sprintf("Merge branch '%s' of %s/%s into %s", pr.HeadBranch, pr.MustHeadUserName(), pr.HeadRepo.Name, pr.BaseBranch)
+	if err := pr.LoadIssue(); err != nil {
+		log.Error("Cannot load issue %d for PR id %d: Error: %v", pr.IssueID, pr.ID, err)
+		return ""
+	}
+
+	if pr.BaseRepoID == pr.HeadRepoID {
+		return fmt.Sprintf("Merge pull request '%s' (#%d) from %s into %s", pr.Issue.Title, pr.Issue.Index, pr.HeadBranch, pr.BaseBranch)
+	}
+
+	return fmt.Sprintf("Merge pull request '%s' (#%d) from %s:%s into %s", pr.Issue.Title, pr.Issue.Index, pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseBranch)
 }

 // GetCommitMessages returns the commit messages between head and merge base (if there is one)
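With this change the default merge message is built from the issue title and index rather than from the head repository's owner and name. For illustration, the two formats produce strings like the following (all values are made up):

```go
package main

import "fmt"

func main() {
	title, index := "Fix login redirect", int64(42)
	// Head and base branch live in the same repository:
	fmt.Printf("Merge pull request '%s' (#%d) from %s into %s\n", title, index, "fix-login", "master")
	// Cross-repository pull request includes the head repository's full name:
	fmt.Printf("Merge pull request '%s' (#%d) from %s:%s into %s\n", title, index, "forker/repo", "fix-login", "master")
}
```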
@@ -384,6 +439,13 @@ func (pr *PullRequest) GetDefaultSquashMessage() string {
 		log.Error("LoadIssue: %v", err)
 		return ""
 	}
+	if err := pr.LoadBaseRepo(); err != nil {
+		log.Error("LoadBaseRepo: %v", err)
+		return ""
+	}
+	if pr.BaseRepo.UnitEnabled(UnitTypeExternalTracker) {
+		return fmt.Sprintf("%s (!%d)", pr.Issue.Title, pr.Issue.Index)
+	}
 	return fmt.Sprintf("%s (#%d)", pr.Issue.Title, pr.Issue.Index)
 }
@@ -404,31 +466,20 @@ func (pr *PullRequest) apiFormat(e Engine) *api.PullRequest {
 		baseBranch *git.Branch
 		headBranch *git.Branch
 		baseCommit *git.Commit
-		headCommit *git.Commit
 		err        error
 	)
 	if err = pr.Issue.loadRepo(e); err != nil {
-		log.Error("loadRepo[%d]: %v", pr.ID, err)
+		log.Error("pr.Issue.loadRepo[%d]: %v", pr.ID, err)
 		return nil
 	}
 	apiIssue := pr.Issue.apiFormat(e)
-	if pr.BaseRepo == nil {
+	if err := pr.loadBaseRepo(e); err != nil {
-		pr.BaseRepo, err = getRepositoryByID(e, pr.BaseRepoID)
+		log.Error("loadBaseRepo[%d]: %v", pr.ID, err)
-		if err != nil {
+		return nil
-			log.Error("GetRepositoryById[%d]: %v", pr.ID, err)
-			return nil
-		}
-	}
-	if pr.HeadRepo == nil {
-		pr.HeadRepo, err = getRepositoryByID(e, pr.HeadRepoID)
-		if err != nil {
-			log.Error("GetRepositoryById[%d]: %v", pr.ID, err)
-			return nil
-		}
 	}

-	if err = pr.Issue.loadRepo(e); err != nil {
+	if err := pr.loadHeadRepo(e); err != nil {
-		log.Error("pr.Issue.loadRepo[%d]: %v", pr.ID, err)
+		log.Error("loadHeadRepo[%d]: %v", pr.ID, err)
 		return nil
 	}
@@ -453,67 +504,21 @@ func (pr *PullRequest) apiFormat(e Engine) *api.PullRequest {
 		Deadline: apiIssue.Deadline,
 		Created: pr.Issue.CreatedUnix.AsTimePtr(),
 		Updated: pr.Issue.UpdatedUnix.AsTimePtr(),
-	}
+		Base: &api.PRBranchInfo{
-	baseBranch, err = pr.BaseRepo.GetBranch(pr.BaseBranch)
-	if err != nil {
-		if git.IsErrBranchNotExist(err) {
-			apiPullRequest.Base = nil
-		} else {
-			log.Error("GetBranch[%s]: %v", pr.BaseBranch, err)
-			return nil
-		}
-	} else {
-		apiBaseBranchInfo := &api.PRBranchInfo{
 			Name: pr.BaseBranch,
 			Ref: pr.BaseBranch,
 			RepoID: pr.BaseRepoID,
 			Repository: pr.BaseRepo.innerAPIFormat(e, AccessModeNone, false),
-		}
+		},
-		baseCommit, err = baseBranch.GetCommit()
+		Head: &api.PRBranchInfo{
-		if err != nil {
+			Name: pr.HeadBranch,
-			if git.IsErrNotExist(err) {
+			Ref: fmt.Sprintf("refs/pull/%d/head", pr.Index),
-				apiBaseBranchInfo.Sha = ""
+			RepoID: -1,
-			} else {
+		},
-				log.Error("GetCommit[%s]: %v", baseBranch.Name, err)
-				return nil
-			}
-		} else {
-			apiBaseBranchInfo.Sha = baseCommit.ID.String()
-		}
-		apiPullRequest.Base = apiBaseBranchInfo
-	}
-
-	headBranch, err = pr.HeadRepo.GetBranch(pr.HeadBranch)
-	if err != nil {
-		if git.IsErrBranchNotExist(err) {
-			apiPullRequest.Head = nil
-		} else {
-			log.Error("GetBranch[%s]: %v", pr.HeadBranch, err)
-			return nil
-		}
-	} else {
-		apiHeadBranchInfo := &api.PRBranchInfo{
-			Name: pr.HeadBranch,
-			Ref: pr.HeadBranch,
-			RepoID: pr.HeadRepoID,
-			Repository: pr.HeadRepo.innerAPIFormat(e, AccessModeNone, false),
-		}
-		headCommit, err = headBranch.GetCommit()
-		if err != nil {
-			if git.IsErrNotExist(err) {
-				apiHeadBranchInfo.Sha = ""
-			} else {
-				log.Error("GetCommit[%s]: %v", headBranch.Name, err)
-				return nil
-			}
-		} else {
-			apiHeadBranchInfo.Sha = headCommit.ID.String()
-		}
-		apiPullRequest.Head = apiHeadBranchInfo
 	}

 	if pr.Status != PullRequestStatusChecking {
-		mergeable := pr.Status != PullRequestStatusConflict && !pr.IsWorkInProgress()
+		mergeable := !(pr.Status == PullRequestStatusConflict || pr.Status == PullRequestStatusError) && !pr.IsWorkInProgress()
 		apiPullRequest.Mergeable = mergeable
 	}
 	if pr.HasMerged {
@@ -522,33 +527,78 @@ func (pr *PullRequest) apiFormat(e Engine) *api.PullRequest {
 		apiPullRequest.MergedBy = pr.Merger.APIFormat()
 	}

-	return apiPullRequest
+	baseRepoPath := pr.BaseRepo.repoPath(e)
-}
+	baseGitRepo, err := git.OpenRepository(baseRepoPath)
+	if err != nil {
-func (pr *PullRequest) getHeadRepo(e Engine) (err error) {
+		log.Error("OpenRepository[%s]: %v", baseRepoPath, err)
-	pr.HeadRepo, err = getRepositoryByID(e, pr.HeadRepoID)
+		return nil
-	if err != nil && !IsErrRepoNotExist(err) {
-		return fmt.Errorf("getRepositoryByID(head): %v", err)
 	}
-	return nil
+	defer baseGitRepo.Close()
-}

-// GetHeadRepo loads the head repository
+	baseBranch, err = baseGitRepo.GetBranch(pr.BaseBranch)
-func (pr *PullRequest) GetHeadRepo() error {
+	if err != nil && !git.IsErrBranchNotExist(err) {
-	return pr.getHeadRepo(x)
+		log.Error("GetBranch[%s]: %v", pr.BaseBranch, err)
-}
-
-// GetBaseRepo loads the target repository
-func (pr *PullRequest) GetBaseRepo() (err error) {
-	if pr.BaseRepo != nil {
 		return nil
 	}

-	pr.BaseRepo, err = GetRepositoryByID(pr.BaseRepoID)
+	if err == nil {
-	if err != nil {
+		baseCommit, err = baseBranch.GetCommit()
-		return fmt.Errorf("GetRepositoryByID(base): %v", err)
+		if err != nil && !git.IsErrNotExist(err) {
+			log.Error("GetCommit[%s]: %v", baseBranch.Name, err)
+			return nil
+		}
+
+		if err == nil {
+			apiPullRequest.Base.Sha = baseCommit.ID.String()
+		}
 	}
-	return nil
+
+	if pr.HeadRepo != nil {
+		apiPullRequest.Head.RepoID = pr.HeadRepo.ID
+		apiPullRequest.Head.Repository = pr.HeadRepo.innerAPIFormat(e, AccessModeNone, false)
+
+		var headGitRepo *git.Repository
+		if pr.HeadRepoID == pr.BaseRepoID {
+			headGitRepo = baseGitRepo
+		} else {
+			headRepoPath := pr.HeadRepo.repoPath(e)
+			headGitRepo, err = git.OpenRepository(headRepoPath)
+			if err != nil {
+				log.Error("OpenRepository[%s]: %v", headRepoPath, err)
+				return nil
+			}
+			defer headGitRepo.Close()
+		}
+
+		headBranch, err = headGitRepo.GetBranch(pr.HeadBranch)
+		if err != nil && !git.IsErrBranchNotExist(err) {
+			log.Error("GetBranch[%s]: %v", pr.HeadBranch, err)
+			return nil
+		}
+
+		if git.IsErrBranchNotExist(err) {
+			headCommitID, err := headGitRepo.GetRefCommitID(apiPullRequest.Head.Ref)
+			if err != nil && !git.IsErrNotExist(err) {
+				log.Error("GetCommit[%s]: %v", headBranch.Name, err)
+				return nil
+			}
+			if err == nil {
+				apiPullRequest.Head.Sha = headCommitID
+			}
+		} else {
+			commit, err := headBranch.GetCommit()
+			if err != nil && !git.IsErrNotExist(err) {
+				log.Error("GetCommit[%s]: %v", headBranch.Name, err)
+				return nil
+			}
+			if err == nil {
+				apiPullRequest.Head.Ref = pr.HeadBranch
+				apiPullRequest.Head.Sha = commit.ID.String()
+			}
+		}
+	}
+
+	return apiPullRequest
 }

 // IsChecking returns true if this pull request is still checking conflict.
@@ -563,7 +613,7 @@ func (pr *PullRequest) CanAutoMerge() bool {

 // GetLastCommitStatus returns the last commit status for this pull request.
 func (pr *PullRequest) GetLastCommitStatus() (status *CommitStatus, err error) {
-	if err = pr.GetHeadRepo(); err != nil {
+	if err = pr.LoadHeadRepo(); err != nil {
 		return nil, err
 	}
@@ -617,8 +667,8 @@ func (pr *PullRequest) CheckUserAllowedToMerge(doer *User) (err error) {
 	}

 	if pr.BaseRepo == nil {
-		if err = pr.GetBaseRepo(); err != nil {
+		if err = pr.LoadBaseRepo(); err != nil {
-			return fmt.Errorf("GetBaseRepo: %v", err)
+			return fmt.Errorf("LoadBaseRepo: %v", err)
 		}
 	}
@@ -634,44 +684,66 @@ func (pr *PullRequest) CheckUserAllowedToMerge(doer *User) (err error) {
 }

 // SetMerged sets a pull request to merged and closes the corresponding issue
-func (pr *PullRequest) SetMerged() (err error) {
+func (pr *PullRequest) SetMerged() (bool, error) {
 	if pr.HasMerged {
-		return fmt.Errorf("PullRequest[%d] already merged", pr.Index)
+		return false, fmt.Errorf("PullRequest[%d] already merged", pr.Index)
 	}
 	if pr.MergedCommitID == "" || pr.MergedUnix == 0 || pr.Merger == nil {
-		return fmt.Errorf("Unable to merge PullRequest[%d], some required fields are empty", pr.Index)
+		return false, fmt.Errorf("Unable to merge PullRequest[%d], some required fields are empty", pr.Index)
 	}

 	pr.HasMerged = true

 	sess := x.NewSession()
 	defer sess.Close()
-	if err = sess.Begin(); err != nil {
+	if err := sess.Begin(); err != nil {
-		return err
+		return false, err
 	}

-	if err = pr.loadIssue(sess); err != nil {
+	if _, err := sess.Exec("UPDATE `issue` SET `repo_id` = `repo_id` WHERE `id` = ?", pr.IssueID); err != nil {
-		return err
+		return false, err
 	}

-	if err = pr.Issue.loadRepo(sess); err != nil {
+	if _, err := sess.Exec("UPDATE `pull_request` SET `issue_id` = `issue_id` WHERE `id` = ?", pr.ID); err != nil {
-		return err
+		return false, err
-	}
-	if err = pr.Issue.Repo.getOwner(sess); err != nil {
-		return err
 	}

-	if _, err = pr.Issue.changeStatus(sess, pr.Merger, true); err != nil {
+	pr.Issue = nil
-		return fmt.Errorf("Issue.changeStatus: %v", err)
+	if err := pr.loadIssue(sess); err != nil {
-	}
+		return false, err
-	if _, err = sess.ID(pr.ID).Cols("has_merged, status, merged_commit_id, merger_id, merged_unix").Update(pr); err != nil {
-		return fmt.Errorf("update pull request: %v", err)
 	}

-	if err = sess.Commit(); err != nil {
+	if tmpPr, err := getPullRequestByID(sess, pr.ID); err != nil {
-		return fmt.Errorf("Commit: %v", err)
+		return false, err
+	} else if tmpPr.HasMerged {
+		if pr.Issue.IsClosed {
+			return false, nil
+		}
+		return false, fmt.Errorf("PullRequest[%d] already merged but it's associated issue [%d] is not closed", pr.Index, pr.IssueID)
+	} else if pr.Issue.IsClosed {
+		return false, fmt.Errorf("PullRequest[%d] already closed", pr.Index)
 	}
-	return nil
+
+	if err := pr.Issue.loadRepo(sess); err != nil {
+		return false, err
+	}
+
+	if err := pr.Issue.Repo.getOwner(sess); err != nil {
+		return false, err
+	}
+
+	if _, err := pr.Issue.changeStatus(sess, pr.Merger, true); err != nil {
+		return false, fmt.Errorf("Issue.changeStatus: %v", err)
+	}
+
+	if _, err := sess.Where("id = ?", pr.ID).Cols("has_merged, status, merged_commit_id, merger_id, merged_unix").Update(pr); err != nil {
+		return false, fmt.Errorf("Failed to update pr[%d]: %v", pr.ID, err)
+	}
+
+	if err := sess.Commit(); err != nil {
+		return false, fmt.Errorf("Commit: %v", err)
+	}
+	return true, nil
 }

 // NewPullRequest creates new pull request with labels for repository.
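SetMerged now reports whether this particular call performed the merge, and the no-op UPDATE statements at the start of the transaction appear intended to take row locks on the issue and pull_request rows so that concurrent merges serialize and the loser sees HasMerged/IsClosed already set (a reading of the code, not documented behaviour). A minimal sketch of how a caller can use the new (bool, error) return, with a stand-in type; the real function returns an error when the pull request is already merged on entry, which this simplified version folds into (false, nil):

```go
package main

import "fmt"

// fakePR stands in for *PullRequest just to show caller-side handling of the
// new (bool, error) signature: follow-up work runs only for the winning call.
type fakePR struct{ hasMerged bool }

func (pr *fakePR) SetMerged() (bool, error) {
	if pr.hasMerged {
		return false, nil // someone else merged first; not treated as an error here
	}
	pr.hasMerged = true
	return true, nil
}

func main() {
	pr := &fakePR{}
	for i := 0; i < 2; i++ {
		if merged, err := pr.SetMerged(); err == nil && merged {
			fmt.Println("fire merge webhook once")
		}
	}
}
```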
@@ -830,6 +902,12 @@ func (pr *PullRequest) UpdateCols(cols ...string) error {
 	return err
 }

+// UpdateColsIfNotMerged updates specific fields of a pull request if it has not been merged
+func (pr *PullRequest) UpdateColsIfNotMerged(cols ...string) error {
+	_, err := x.Where("id = ? AND has_merged = ?", pr.ID, false).Cols(cols...).Update(pr)
+	return err
+}
+
 // IsWorkInProgress determine if the Pull Request is a Work In Progress by its title
 func (pr *PullRequest) IsWorkInProgress() bool {
 	if err := pr.LoadIssue(); err != nil {
@@ -869,7 +947,7 @@ func (pr *PullRequest) GetWorkInProgressPrefix() string {
 // IsHeadEqualWithBranch returns if the commits of branchName are available in pull request head
 func (pr *PullRequest) IsHeadEqualWithBranch(branchName string) (bool, error) {
 	var err error
-	if err = pr.GetBaseRepo(); err != nil {
+	if err = pr.LoadBaseRepo(); err != nil {
 		return false, err
 	}
 	baseGitRepo, err := git.OpenRepository(pr.BaseRepo.RepoPath())
@@ -881,9 +959,12 @@ func (pr *PullRequest) IsHeadEqualWithBranch(branchName string) (bool, error) {
 		return false, err
 	}

-	if err = pr.GetHeadRepo(); err != nil {
+	if err = pr.LoadHeadRepo(); err != nil {
 		return false, err
 	}
+	if pr.HeadRepo == nil {
+		return false, ErrHeadRepoMissed{pr.ID, pr.HeadRepoID}
+	}
 	headGitRepo, err := git.OpenRepository(pr.HeadRepo.RepoPath())
 	if err != nil {
 		return false, err
@@ -12,7 +12,7 @@ import (

 // SignMerge determines if we should sign a PR merge commit to the base repository
 func (pr *PullRequest) SignMerge(u *User, tmpBasePath, baseCommit, headCommit string) (bool, string) {
-	if err := pr.GetBaseRepo(); err != nil {
+	if err := pr.LoadBaseRepo(); err != nil {
 		log.Error("Unable to get Base Repo for pull request")
 		return false, ""
 	}
@@ -7,6 +7,7 @@ package models

 import (
 	"testing"

+	"code.gitea.io/gitea/modules/structs"
 	"github.com/stretchr/testify/assert"
 )
|
@ -31,29 +32,36 @@ func TestPullRequest_LoadIssue(t *testing.T) {
|
||||||
|
|
||||||
func TestPullRequest_APIFormat(t *testing.T) {
|
func TestPullRequest_APIFormat(t *testing.T) {
|
||||||
assert.NoError(t, PrepareTestDatabase())
|
assert.NoError(t, PrepareTestDatabase())
|
||||||
|
headRepo := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 1}).(*models.Repository)
|
||||||
pr := AssertExistsAndLoadBean(t, &PullRequest{ID: 1}).(*PullRequest)
|
pr := AssertExistsAndLoadBean(t, &PullRequest{ID: 1}).(*PullRequest)
|
||||||
assert.NoError(t, pr.LoadAttributes())
|
assert.NoError(t, pr.LoadAttributes())
|
||||||
assert.NoError(t, pr.LoadIssue())
|
assert.NoError(t, pr.LoadIssue())
|
||||||
apiPullRequest := pr.APIFormat()
|
apiPullRequest := pr.APIFormat()
|
||||||
assert.NotNil(t, apiPullRequest)
|
assert.NotNil(t, apiPullRequest)
|
||||||
assert.Nil(t, apiPullRequest.Head)
|
assert.EqualValues(t, &structs.PRBranchInfo{
|
||||||
|
Name: "branch1",
|
||||||
|
Ref: "refs/pull/2/head",
|
||||||
|
Sha: "4a357436d925b5c974181ff12a994538ddc5a269",
|
||||||
|
RepoID: 1,
|
||||||
|
Repository: headRepo.APIFormat(models.AccessModeNone),
|
||||||
|
}, apiPullRequest.Head)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestPullRequest_GetBaseRepo(t *testing.T) {
|
func TestPullRequest_LoadBaseRepo(t *testing.T) {
|
||||||
assert.NoError(t, PrepareTestDatabase())
|
assert.NoError(t, PrepareTestDatabase())
|
||||||
pr := AssertExistsAndLoadBean(t, &PullRequest{ID: 1}).(*PullRequest)
|
pr := AssertExistsAndLoadBean(t, &PullRequest{ID: 1}).(*PullRequest)
|
||||||
assert.NoError(t, pr.GetBaseRepo())
|
assert.NoError(t, pr.LoadBaseRepo())
|
||||||
assert.NotNil(t, pr.BaseRepo)
|
assert.NotNil(t, pr.BaseRepo)
|
||||||
assert.Equal(t, pr.BaseRepoID, pr.BaseRepo.ID)
|
assert.Equal(t, pr.BaseRepoID, pr.BaseRepo.ID)
|
||||||
assert.NoError(t, pr.GetBaseRepo())
|
assert.NoError(t, pr.LoadBaseRepo())
|
||||||
assert.NotNil(t, pr.BaseRepo)
|
assert.NotNil(t, pr.BaseRepo)
|
||||||
assert.Equal(t, pr.BaseRepoID, pr.BaseRepo.ID)
|
assert.Equal(t, pr.BaseRepoID, pr.BaseRepo.ID)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestPullRequest_GetHeadRepo(t *testing.T) {
|
func TestPullRequest_LoadHeadRepo(t *testing.T) {
|
||||||
assert.NoError(t, PrepareTestDatabase())
|
assert.NoError(t, PrepareTestDatabase())
|
||||||
pr := AssertExistsAndLoadBean(t, &PullRequest{ID: 1}).(*PullRequest)
|
pr := AssertExistsAndLoadBean(t, &PullRequest{ID: 1}).(*PullRequest)
|
||||||
assert.NoError(t, pr.GetHeadRepo())
|
assert.NoError(t, pr.LoadHeadRepo())
|
||||||
assert.NotNil(t, pr.HeadRepo)
|
assert.NotNil(t, pr.HeadRepo)
|
||||||
assert.Equal(t, pr.HeadRepoID, pr.HeadRepo.ID)
|
assert.Equal(t, pr.HeadRepoID, pr.HeadRepo.ID)
|
||||||
}
|
}
|
||||||
|
|
|
@@ -204,6 +204,14 @@ type Repository struct {
 	UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"`
 }

+// SanitizedOriginalURL returns a sanitized OriginalURL
+func (repo *Repository) SanitizedOriginalURL() string {
+	if repo.OriginalURL == "" {
+		return ""
+	}
+	return util.SanitizeURLCredentials(repo.OriginalURL, false)
+}
+
 // ColorFormat returns a colored string to represent this repo
 func (repo *Repository) ColorFormat(s fmt.State) {
 	var ownerName interface{}
@ -1902,6 +1910,12 @@ func DeleteRepository(doer *User, uid, repoID int64) error {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if len(repo.Topics) > 0 {
|
||||||
|
if err = removeTopicsFromRepo(sess, repo.ID); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// FIXME: Remove repository files should be executed after transaction succeed.
|
// FIXME: Remove repository files should be executed after transaction succeed.
|
||||||
repoPath := repo.repoPath(sess)
|
repoPath := repo.repoPath(sess)
|
||||||
removeAllWithNotice(sess, "Delete repository files", repoPath)
|
removeAllWithNotice(sess, "Delete repository files", repoPath)
|
||||||
|
|
|
@@ -202,3 +202,23 @@ func (repo *Repository) getRepoTeams(e Engine) (teams []*Team, err error) {
 func (repo *Repository) GetRepoTeams() ([]*Team, error) {
 	return repo.getRepoTeams(x)
 }
+
+// IsOwnerMemberCollaborator checks if a provided user is the owner, a collaborator or a member of a team in a repository
+func (repo *Repository) IsOwnerMemberCollaborator(userID int64) (bool, error) {
+	if repo.OwnerID == userID {
+		return true, nil
+	}
+	teamMember, err := x.Join("INNER", "team_repo", "team_repo.team_id = team_user.team_id").
+		Join("INNER", "team_unit", "team_unit.team_id = team_user.team_id").
+		Where("team_repo.repo_id = ?", repo.ID).
+		And("team_unit.`type` = ?", UnitTypeCode).
+		And("team_user.uid = ?", userID).Table("team_user").Exist(&TeamUser{})
+	if err != nil {
+		return false, err
+	}
+	if teamMember {
+		return true, nil
+	}
+
+	return x.Get(&Collaboration{RepoID: repo.ID, UserID: userID})
+}
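Editor's note: the hunk above adds `IsOwnerMemberCollaborator`, which folds the owner, team-membership and collaborator checks into one call. A minimal usage sketch follows; the wrapper function, its name and the error message are illustrative assumptions, only the method itself comes from this diff.

```go
package example // illustrative sketch, not part of the diff

import (
	"fmt"

	"code.gitea.io/gitea/models"
)

// requireRepoAccess returns an error unless doer is owner, team member or collaborator.
func requireRepoAccess(repo *models.Repository, doer *models.User) error {
	ok, err := repo.IsOwnerMemberCollaborator(doer.ID)
	if err != nil {
		return err // propagate database errors unchanged
	}
	if !ok {
		return fmt.Errorf("user %d may not access %s", doer.ID, repo.FullName())
	}
	return nil
}
```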
@@ -124,41 +124,43 @@ func generateRepoCommit(e Engine, repo, templateRepo, generateRepo *Repository,
 		return fmt.Errorf("checkGiteaTemplate: %v", err)
 	}

-	if err := os.Remove(gt.Path); err != nil {
-		return fmt.Errorf("remove .giteatemplate: %v", err)
-	}
+	if gt != nil {
+		if err := os.Remove(gt.Path); err != nil {
+			return fmt.Errorf("remove .giteatemplate: %v", err)
+		}

 		// Avoid walking tree if there are no globs
 		if len(gt.Globs()) > 0 {
 			tmpDirSlash := strings.TrimSuffix(filepath.ToSlash(tmpDir), "/") + "/"
 			if err := filepath.Walk(tmpDirSlash, func(path string, info os.FileInfo, walkErr error) error {
 				if walkErr != nil {
 					return walkErr
-			}
-
-			if info.IsDir() {
-				return nil
-			}
-
-			base := strings.TrimPrefix(filepath.ToSlash(path), tmpDirSlash)
-			for _, g := range gt.Globs() {
-				if g.Match(base) {
-					content, err := ioutil.ReadFile(path)
-					if err != nil {
-						return err
-					}
-
-					if err := ioutil.WriteFile(path,
-						[]byte(generateExpansion(string(content), templateRepo, generateRepo)),
-						0644); err != nil {
-						return err
-					}
-					break
 				}

+				if info.IsDir() {
+					return nil
+				}
+
+				base := strings.TrimPrefix(filepath.ToSlash(path), tmpDirSlash)
+				for _, g := range gt.Globs() {
+					if g.Match(base) {
+						content, err := ioutil.ReadFile(path)
+						if err != nil {
+							return err
+						}
+
+						if err := ioutil.WriteFile(path,
+							[]byte(generateExpansion(string(content), templateRepo, generateRepo)),
+							0644); err != nil {
+							return err
+						}
+						break
+					}
+				}
+				return nil
+			}); err != nil {
+				return err
 			}
-			return nil
-		}); err != nil {
-			return err
 		}
 	}

@@ -315,6 +315,17 @@ func SearchRepository(opts *SearchRepoOptions) (RepositoryList, int64, error) {

 // accessibleRepositoryCondition takes a user a returns a condition for checking if a repository is accessible
 func accessibleRepositoryCondition(userID int64) builder.Cond {
+	if userID <= 0 {
+		return builder.And(
+			builder.Eq{"`repository`.is_private": false},
+			builder.Or(
+				// A. Aren't in organisations __OR__
+				builder.NotIn("`repository`.owner_id", builder.Select("id").From("`user`").Where(builder.Eq{"type": UserTypeOrganization})),
+				// B. Is a public organisation.
+				builder.In("`repository`.owner_id", builder.Select("id").From("`user`").Where(builder.Eq{"visibility": structs.VisibleTypePublic}))),
+		)
+	}
+
 	return builder.Or(
 		// 1. Be able to see all non-private repositories that either:
 		builder.And(

@@ -349,6 +360,12 @@ func SearchRepositoryByName(opts *SearchRepoOptions) (RepositoryList, int64, err
 	return SearchRepository(opts)
 }

+// AccessibleRepoIDsQuery queries accessible repository ids. Usable as a subquery wherever repo ids need to be filtered.
+func AccessibleRepoIDsQuery(userID int64) *builder.Builder {
+	// NB: Please note this code needs to still work if user is nil
+	return builder.Select("id").From("repository").Where(accessibleRepositoryCondition(userID))
+}
+
 // FindUserAccessibleRepoIDs find all accessible repositories' ID by user's id
 func FindUserAccessibleRepoIDs(userID int64) ([]int64, error) {
 	var accessCond builder.Cond = builder.Eq{"is_private": false}
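Editor's note: because `AccessibleRepoIDsQuery` returns an xorm `*builder.Builder`, it can be dropped into other conditions as a subselect. A hedged sketch, under the assumption that a caller is filtering an issue query; the function name and column names are illustrative, not from this diff.

```go
package example // illustrative sketch, not part of the diff

import (
	"code.gitea.io/gitea/models"

	"xorm.io/builder"
)

// visibleIssueCond restricts a hypothetical issue query to repositories
// the given user is allowed to see, using the new subquery helper.
func visibleIssueCond(userID int64) builder.Cond {
	return builder.In("issue.repo_id", models.AccessibleRepoIDsQuery(userID))
}
```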
@@ -129,7 +129,7 @@ func addTopicByNameToRepo(e Engine, repoID int64, topicName string) (*Topic, err
 }

 // removeTopicFromRepo remove a topic from a repo and decrements the topic repo count
-func removeTopicFromRepo(repoID int64, topic *Topic, e Engine) error {
+func removeTopicFromRepo(e Engine, repoID int64, topic *Topic) error {
 	topic.RepoCount--
 	if _, err := e.ID(topic.ID).Cols("repo_count").Update(topic); err != nil {
 		return err

@@ -145,6 +145,24 @@ func removeTopicFromRepo(repoID int64, topic *Topic, e Engine) error {
 	return nil
 }

+// removeTopicsFromRepo remove all topics from the repo and decrements respective topics repo count
+func removeTopicsFromRepo(e Engine, repoID int64) error {
+	_, err := e.Where(
+		builder.In("id",
+			builder.Select("topic_id").From("repo_topic").Where(builder.Eq{"repo_id": repoID}),
+		),
+	).Cols("repo_count").SetExpr("repo_count", "repo_count-1").Update(&Topic{})
+	if err != nil {
+		return err
+	}
+
+	if _, err = e.Delete(&RepoTopic{RepoID: repoID}); err != nil {
+		return err
+	}
+
+	return nil
+}
+
 // FindTopicOptions represents the options when fdin topics
 type FindTopicOptions struct {
 	RepoID int64

@@ -217,7 +235,7 @@ func DeleteTopic(repoID int64, topicName string) (*Topic, error) {
 		return nil, nil
 	}

-	err = removeTopicFromRepo(repoID, topic, x)
+	err = removeTopicFromRepo(x, repoID, topic)

 	return topic, err
 }

@@ -278,7 +296,7 @@ func SaveTopics(repoID int64, topicNames ...string) error {
 	}

 	for _, topic := range removeTopics {
-		err := removeTopicFromRepo(repoID, topic, sess)
+		err := removeTopicFromRepo(sess, repoID, topic)
 		if err != nil {
 			return err
 		}
@@ -84,7 +84,7 @@ func MainTest(m *testing.M, pathToGiteaRoot string) {

 func createTestEngine(fixturesDir string) error {
 	var err error
-	x, err = xorm.NewEngine("sqlite3", "file::memory:?cache=shared")
+	x, err = xorm.NewEngine("sqlite3", "file::memory:?cache=shared&_txlock=immediate")
 	if err != nil {
 		return err
 	}
@@ -18,6 +18,7 @@ import (
 	"image/png"
 	"os"
 	"path/filepath"
+	"regexp"
 	"strconv"
 	"strings"
 	"time"

@@ -87,6 +88,9 @@ var (

 	// ErrUnsupportedLoginType login source is unknown error
 	ErrUnsupportedLoginType = errors.New("Login source is unknown")
+
+	// Characters prohibited in a user name (anything except A-Za-z0-9_.-)
+	alphaDashDotPattern = regexp.MustCompile(`[^\w-\.]`)
 )

 // User represents the object of individual and member of organization.

@@ -503,7 +507,7 @@ func (u *User) ValidatePassword(passwd string) bool {

 // IsPasswordSet checks if the password is set or left empty
 func (u *User) IsPasswordSet() bool {
-	return len(u.Passwd) > 0
+	return !u.ValidatePassword("")
 }

 // UploadAvatar saves custom avatar for user.

@@ -708,9 +712,11 @@ func (u *User) DisplayName() string {
 // GetDisplayName returns full name if it's not empty and DEFAULT_SHOW_FULL_NAME is set,
 // returns username otherwise.
 func (u *User) GetDisplayName() string {
-	trimmed := strings.TrimSpace(u.FullName)
-	if len(trimmed) > 0 && setting.UI.DefaultShowFullName {
-		return trimmed
+	if setting.UI.DefaultShowFullName {
+		trimmed := strings.TrimSpace(u.FullName)
+		if len(trimmed) > 0 {
+			return trimmed
+		}
 	}
 	return u.Name
 }

@@ -819,7 +825,9 @@ var (
 	"issues",
 	"js",
 	"less",
+	"manifest.json",
 	"metrics",
+	"milestones",
 	"new",
 	"notifications",
 	"org",

@@ -868,6 +876,11 @@ func isUsableName(names, patterns []string, name string) error {

 // IsUsableUsername returns an error when a username is reserved
 func IsUsableUsername(name string) error {
+	// Validate username make sure it satisfies requirement.
+	if alphaDashDotPattern.MatchString(name) {
+		// Note: usually this error is normally caught up earlier in the UI
+		return ErrNameCharsNotAllowed{Name: name}
+	}
 	return isUsableName(reservedUsernames, reservedUserPatterns, name)
 }

@@ -987,7 +1000,7 @@ func VerifyActiveEmailCode(code, email string) *EmailAddress {
 	data := com.ToStr(user.ID) + email + user.LowerName + user.Passwd + user.Rands

 	if base.VerifyTimeLimitCode(data, minutes, prefix) {
-		emailAddress := &EmailAddress{Email: email}
+		emailAddress := &EmailAddress{UID: user.ID, Email: email}
 		if has, _ := x.Get(emailAddress); has {
 			return emailAddress
 		}

@@ -1760,6 +1773,15 @@ func SyncExternalUsers(ctx context.Context) {
 				continue
 			}

+			if len(sr) == 0 {
+				if !s.LDAP().AllowDeactivateAll {
+					log.Error("LDAP search found no entries but did not report an error. Refusing to deactivate all users")
+					continue
+				} else {
+					log.Warn("LDAP search found no entries but did not report an error. All users will be deactivated as per settings")
+				}
+			}
+
 			for _, su := range sr {
 				select {
 				case <-ctx.Done():
@@ -1,4 +1,5 @@
 // Copyright 2016 The Gogs Authors. All rights reserved.
+// Copyright 2020 The Gitea Authors. All rights reserved.
 // Use of this source code is governed by a MIT-style
 // license that can be found in the LICENSE file.

@@ -8,6 +9,12 @@ import (
 	"errors"
 	"fmt"
 	"strings"
+
+	"code.gitea.io/gitea/modules/log"
+	"code.gitea.io/gitea/modules/setting"
+	"code.gitea.io/gitea/modules/util"
+
+	"xorm.io/builder"
 )

 var (

@@ -54,13 +61,66 @@ func GetEmailAddresses(uid int64) ([]*EmailAddress, error) {
 	if !isPrimaryFound {
 		emails = append(emails, &EmailAddress{
 			Email:       u.Email,
-			IsActivated: true,
+			IsActivated: u.IsActive,
 			IsPrimary:   true,
 		})
 	}
 	return emails, nil
 }

+// GetEmailAddressByID gets a user's email address by ID
+func GetEmailAddressByID(uid, id int64) (*EmailAddress, error) {
+	// User ID is required for security reasons
+	email := &EmailAddress{ID: id, UID: uid}
+	if has, err := x.Get(email); err != nil {
+		return nil, err
+	} else if !has {
+		return nil, nil
+	}
+	return email, nil
+}
+
+func isEmailActive(e Engine, email string, userID, emailID int64) (bool, error) {
+	if len(email) == 0 {
+		return true, nil
+	}
+
+	// Can't filter by boolean field unless it's explicit
+	cond := builder.NewCond()
+	cond = cond.And(builder.Eq{"email": email}, builder.Neq{"id": emailID})
+	if setting.Service.RegisterEmailConfirm {
+		// Inactive (unvalidated) addresses don't count as active if email validation is required
+		cond = cond.And(builder.Eq{"is_activated": true})
+	}
+
+	em := EmailAddress{}
+
+	if has, err := e.Where(cond).Get(&em); has || err != nil {
+		if has {
+			log.Info("isEmailActive('%s',%d,%d) found duplicate in email ID %d", email, userID, emailID, em.ID)
+		}
+		return has, err
+	}
+
+	// Can't filter by boolean field unless it's explicit
+	cond = builder.NewCond()
+	cond = cond.And(builder.Eq{"email": email}, builder.Neq{"id": userID})
+	if setting.Service.RegisterEmailConfirm {
+		cond = cond.And(builder.Eq{"is_active": true})
+	}
+
+	us := User{}
+
+	if has, err := e.Where(cond).Get(&us); has || err != nil {
+		if has {
+			log.Info("isEmailActive('%s',%d,%d) found duplicate in user ID %d", email, userID, emailID, us.ID)
+		}
+		return has, err
+	}
+
+	return false, nil
+}
+
 func isEmailUsed(e Engine, email string) (bool, error) {
 	if len(email) == 0 {
 		return true, nil
@@ -118,31 +178,30 @@ func AddEmailAddresses(emails []*EmailAddress) error {

 // Activate activates the email address to given user.
 func (email *EmailAddress) Activate() error {
-	user, err := GetUserByID(email.UID)
+	sess := x.NewSession()
+	defer sess.Close()
+	if err := sess.Begin(); err != nil {
+		return err
+	}
+	if err := email.updateActivation(sess, true); err != nil {
+		return err
+	}
+	return sess.Commit()
+}
+
+func (email *EmailAddress) updateActivation(e Engine, activate bool) error {
+	user, err := getUserByID(e, email.UID)
 	if err != nil {
 		return err
 	}
 	if user.Rands, err = GetUserSalt(); err != nil {
 		return err
 	}
-	sess := x.NewSession()
-	defer sess.Close()
-	if err = sess.Begin(); err != nil {
+	email.IsActivated = activate
+	if _, err := e.ID(email.ID).Cols("is_activated").Update(email); err != nil {
 		return err
 	}
-
-	email.IsActivated = true
-	if _, err := sess.
-		ID(email.ID).
-		Cols("is_activated").
-		Update(email); err != nil {
-		return err
-	} else if err = updateUserCols(sess, user, "rands"); err != nil {
-		return err
-	}
-
-	return sess.Commit()
+	return updateUserCols(e, user, "rands")
 }

 // DeleteEmailAddress deletes an email address of given user.

@@ -201,7 +260,7 @@ func MakeEmailPrimary(email *EmailAddress) error {
 	}

 	// Make sure the former primary email doesn't disappear.
-	formerPrimaryEmail := &EmailAddress{Email: user.Email}
+	formerPrimaryEmail := &EmailAddress{UID: user.ID, Email: user.Email}
 	has, err = x.Get(formerPrimaryEmail)
 	if err != nil {
 		return err
@@ -228,3 +287,199 @@ func MakeEmailPrimary(email *EmailAddress) error {

 	return sess.Commit()
 }
+
+// SearchEmailOrderBy is used to sort the results from SearchEmails()
+type SearchEmailOrderBy string
+
+func (s SearchEmailOrderBy) String() string {
+	return string(s)
+}
+
+// Strings for sorting result
+const (
+	SearchEmailOrderByEmail        SearchEmailOrderBy = "emails.email ASC, is_primary DESC, sortid ASC"
+	SearchEmailOrderByEmailReverse SearchEmailOrderBy = "emails.email DESC, is_primary ASC, sortid DESC"
+	SearchEmailOrderByName         SearchEmailOrderBy = "`user`.lower_name ASC, is_primary DESC, sortid ASC"
+	SearchEmailOrderByNameReverse  SearchEmailOrderBy = "`user`.lower_name DESC, is_primary ASC, sortid DESC"
+)
+
+// SearchEmailOptions are options to search e-mail addresses for the admin panel
+type SearchEmailOptions struct {
+	Page        int
+	PageSize    int // Can be smaller than or equal to setting.UI.ExplorePagingNum
+	Keyword     string
+	SortType    SearchEmailOrderBy
+	IsPrimary   util.OptionalBool
+	IsActivated util.OptionalBool
+}
+
+// SearchEmailResult is an e-mail address found in the user or email_address table
+type SearchEmailResult struct {
+	UID         int64
+	Email       string
+	IsActivated bool
+	IsPrimary   bool
+	// From User
+	Name     string
+	FullName string
+}
+
+// SearchEmails takes options i.e. keyword and part of email name to search,
+// it returns results in given range and number of total results.
+func SearchEmails(opts *SearchEmailOptions) ([]*SearchEmailResult, int64, error) {
+	// Unfortunately, UNION support for SQLite in xorm is currently broken, so we must
+	// build the SQL ourselves.
+	where := make([]string, 0, 5)
+	args := make([]interface{}, 0, 5)
+
+	emailsSQL := "(SELECT id as sortid, uid, email, is_activated, 0 as is_primary " +
+		"FROM email_address " +
+		"UNION ALL " +
+		"SELECT id as sortid, id AS uid, email, is_active AS is_activated, 1 as is_primary " +
+		"FROM `user` " +
+		"WHERE type = ?) AS emails"
+	args = append(args, UserTypeIndividual)
+
+	if len(opts.Keyword) > 0 {
+		// Note: % can be injected in the Keyword parameter, but it won't do any harm.
+		where = append(where, "(lower(`user`.full_name) LIKE ? OR `user`.lower_name LIKE ? OR emails.email LIKE ?)")
+		likeStr := "%" + strings.ToLower(opts.Keyword) + "%"
+		args = append(args, likeStr)
+		args = append(args, likeStr)
+		args = append(args, likeStr)
+	}
+
+	switch {
+	case opts.IsPrimary.IsTrue():
+		where = append(where, "emails.is_primary = ?")
+		args = append(args, true)
+	case opts.IsPrimary.IsFalse():
+		where = append(where, "emails.is_primary = ?")
+		args = append(args, false)
+	}
+
+	switch {
+	case opts.IsActivated.IsTrue():
+		where = append(where, "emails.is_activated = ?")
+		args = append(args, true)
+	case opts.IsActivated.IsFalse():
+		where = append(where, "emails.is_activated = ?")
+		args = append(args, false)
+	}
+
+	var whereStr string
+	if len(where) > 0 {
+		whereStr = "WHERE " + strings.Join(where, " AND ")
+	}
+
+	joinSQL := "FROM " + emailsSQL + " INNER JOIN `user` ON `user`.id = emails.uid " + whereStr
+
+	count, err := x.SQL("SELECT count(*) "+joinSQL, args...).Count()
+	if err != nil {
+		return nil, 0, fmt.Errorf("Count: %v", err)
+	}
+
+	orderby := opts.SortType.String()
+	if orderby == "" {
+		orderby = SearchEmailOrderByEmail.String()
+	}
+
+	querySQL := "SELECT emails.uid, emails.email, emails.is_activated, emails.is_primary, " +
+		"`user`.name, `user`.full_name " + joinSQL + " ORDER BY " + orderby
+
+	if opts.PageSize == 0 || opts.PageSize > setting.UI.ExplorePagingNum {
+		opts.PageSize = setting.UI.ExplorePagingNum
+	}
+	if opts.Page <= 0 {
+		opts.Page = 1
+	}
+
+	rows, err := x.SQL(querySQL, args...).Rows(new(SearchEmailResult))
+	if err != nil {
+		return nil, 0, fmt.Errorf("Emails: %v", err)
+	}
+
+	// Page manually because xorm can't handle Limit() with raw SQL
+	defer rows.Close()
+
+	emails := make([]*SearchEmailResult, 0, opts.PageSize)
+	skip := (opts.Page - 1) * opts.PageSize
+
+	for rows.Next() {
+		var email SearchEmailResult
+		if err := rows.Scan(&email); err != nil {
+			return nil, 0, err
+		}
+		if skip > 0 {
+			skip--
+			continue
+		}
+		emails = append(emails, &email)
+		if len(emails) == opts.PageSize {
+			break
+		}
+	}

+	return emails, count, err
+}
+
+// ActivateUserEmail will change the activated state of an email address,
+// either primary (in the user table) or secondary (in the email_address table)
+func ActivateUserEmail(userID int64, email string, primary, activate bool) (err error) {
+	sess := x.NewSession()
+	defer sess.Close()
+	if err = sess.Begin(); err != nil {
+		return err
+	}
+	if primary {
+		// Activate/deactivate a user's primary email address
+		user := User{ID: userID, Email: email}
+		if has, err := sess.Get(&user); err != nil {
+			return err
+		} else if !has {
+			return fmt.Errorf("no such user: %d (%s)", userID, email)
+		}
+		if user.IsActive == activate {
+			// Already in the desired state; no action
+			return nil
+		}
+		if activate {
+			if used, err := isEmailActive(sess, email, userID, 0); err != nil {
+				return fmt.Errorf("isEmailActive(): %v", err)
+			} else if used {
+				return ErrEmailAlreadyUsed{Email: email}
+			}
+		}
+		user.IsActive = activate
+		if user.Rands, err = GetUserSalt(); err != nil {
+			return fmt.Errorf("generate salt: %v", err)
+		}
+		if err = updateUserCols(sess, &user, "is_active", "rands"); err != nil {
+			return fmt.Errorf("updateUserCols(): %v", err)
+		}
+	} else {
+		// Activate/deactivate a user's secondary email address
+		// First check if there's another user active with the same address
+		addr := EmailAddress{UID: userID, Email: email}
+		if has, err := sess.Get(&addr); err != nil {
+			return err
+		} else if !has {
+			return fmt.Errorf("no such email: %d (%s)", userID, email)
+		}
+		if addr.IsActivated == activate {
+			// Already in the desired state; no action
+			return nil
+		}
+		if activate {
+			if used, err := isEmailActive(sess, email, 0, addr.ID); err != nil {
+				return fmt.Errorf("isEmailActive(): %v", err)
+			} else if used {
+				return ErrEmailAlreadyUsed{Email: email}
+			}
+		}
+		if err = addr.updateActivation(sess, activate); err != nil {
+			return fmt.Errorf("updateActivation(): %v", err)
+		}
+	}
+	return sess.Commit()
+}
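Editor's note: the hunk above introduces the `SearchEmails` API used by the admin e-mail list. A hedged sketch of a caller follows; the wrapper function, keyword and page-size values are illustrative assumptions, only the types and option fields come from this diff.

```go
package example // illustrative sketch, not part of the diff

import (
	"code.gitea.io/gitea/models"
	"code.gitea.io/gitea/modules/util"
)

// listActivatedEmails pages through activated addresses matching a keyword.
func listActivatedEmails(keyword string, page int) ([]*models.SearchEmailResult, int64, error) {
	return models.SearchEmails(&models.SearchEmailOptions{
		Keyword:     keyword,
		Page:        page,
		PageSize:    30, // clamped to setting.UI.ExplorePagingNum by SearchEmails
		SortType:    models.SearchEmailOrderByEmail,
		IsActivated: util.OptionalBoolTrue,
	})
}
```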
@@ -7,6 +7,8 @@ package models
 import (
 	"testing"

+	"code.gitea.io/gitea/modules/util"
+
 	"github.com/stretchr/testify/assert"
 )

@@ -169,3 +171,65 @@ func TestActivate(t *testing.T) {
 	assert.True(t, emails[2].IsActivated)
 	assert.True(t, emails[2].IsPrimary)
 }
+
+func TestListEmails(t *testing.T) {
+	assert.NoError(t, PrepareTestDatabase())
+
+	// Must find all users and their emails
+	opts := &SearchEmailOptions{}
+	emails, count, err := SearchEmails(opts)
+	assert.NoError(t, err)
+	assert.NotEqual(t, int64(0), count)
+	assert.True(t, count > 5)
+
+	contains := func(match func(s *SearchEmailResult) bool) bool {
+		for _, v := range emails {
+			if match(v) {
+				return true
+			}
+		}
+		return false
+	}
+
+	assert.True(t, contains(func(s *SearchEmailResult) bool { return s.UID == 18 }))
+	// 'user3' is an organization
+	assert.False(t, contains(func(s *SearchEmailResult) bool { return s.UID == 3 }))
+
+	// Must find no records
+	opts = &SearchEmailOptions{Keyword: "NOTFOUND"}
+	emails, count, err = SearchEmails(opts)
+	assert.NoError(t, err)
+	assert.Equal(t, int64(0), count)
+
+	// Must find users 'user2', 'user28', etc.
+	opts = &SearchEmailOptions{Keyword: "user2"}
+	emails, count, err = SearchEmails(opts)
+	assert.NoError(t, err)
+	assert.NotEqual(t, int64(0), count)
+	assert.True(t, contains(func(s *SearchEmailResult) bool { return s.UID == 2 }))
+	assert.True(t, contains(func(s *SearchEmailResult) bool { return s.UID == 27 }))
+
+	// Must find only primary addresses (i.e. from the `user` table)
+	opts = &SearchEmailOptions{IsPrimary: util.OptionalBoolTrue}
+	emails, count, err = SearchEmails(opts)
+	assert.NoError(t, err)
+	assert.True(t, contains(func(s *SearchEmailResult) bool { return s.IsPrimary }))
+	assert.False(t, contains(func(s *SearchEmailResult) bool { return !s.IsPrimary }))
+
+	// Must find only inactive addresses (i.e. not validated)
+	opts = &SearchEmailOptions{IsActivated: util.OptionalBoolFalse}
+	emails, count, err = SearchEmails(opts)
+	assert.NoError(t, err)
+	assert.True(t, contains(func(s *SearchEmailResult) bool { return !s.IsActivated }))
+	assert.False(t, contains(func(s *SearchEmailResult) bool { return s.IsActivated }))
+
+	// Must find more than one page, but retrieve only one
+	opts = &SearchEmailOptions{
+		PageSize: 5,
+		Page:     1,
+	}
+	emails, count, err = SearchEmails(opts)
+	assert.NoError(t, err)
+	assert.Equal(t, 5, len(emails))
+	assert.True(t, count > int64(len(emails)))
+}
@@ -47,3 +47,13 @@ type AdminEditUserForm struct {
 func (f *AdminEditUserForm) Validate(ctx *macaron.Context, errs binding.Errors) binding.Errors {
 	return validate(errs, ctx.Data, f, ctx.Locale)
 }
+
+// AdminDashboardForm form for admin dashboard operations
+type AdminDashboardForm struct {
+	Op int `binding:"required"`
+}
+
+// Validate validates form fields
+func (f *AdminDashboardForm) Validate(ctx *macaron.Context, errs binding.Errors) binding.Errors {
+	return validate(errs, ctx.Data, f, ctx.Locale)
+}
@@ -30,6 +30,7 @@ type AuthenticationForm struct {
 	SearchPageSize int
 	Filter string
 	AdminFilter string
+	AllowDeactivateAll bool
 	IsActive bool
 	IsSyncEnabled bool
 	SMTPAuth string

@@ -47,6 +47,7 @@ type Source struct {
 	Filter string // Query filter to validate entry
 	AdminFilter string // Query filter to check if user is admin
 	Enabled bool // if this source is disabled
+	AllowDeactivateAll bool // Allow an empty search response to deactivate all users from this source
 }

 // SearchResult : user data
@@ -13,7 +13,7 @@ import (
 )

 // Auth pam auth service
-func Auth(serviceName, userName, passwd string) error {
+func Auth(serviceName, userName, passwd string) (string, error) {
 	t, err := pam.StartFunc(serviceName, userName, func(s pam.Style, msg string) (string, error) {
 		switch s {
 		case pam.PromptEchoOff:

@@ -25,12 +25,14 @@ func Auth(serviceName, userName, passwd string) error {
 	})

 	if err != nil {
-		return err
+		return "", err
 	}

 	if err = t.Authenticate(0); err != nil {
-		return err
+		return "", err
 	}

-	return nil
+	// PAM login names might suffer transformations in the PAM stack.
+	// We should take whatever the PAM stack returns for it.
+	return t.GetItem(pam.User)
 }

@@ -11,6 +11,6 @@ import (
 )

 // Auth not supported lack of pam tag
-func Auth(serviceName, userName, passwd string) error {
-	return errors.New("PAM not supported")
+func Auth(serviceName, userName, passwd string) (string, error) {
+	return "", errors.New("PAM not supported")
 }
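Editor's note: with the hunks above, `Auth` now returns the account name the PAM stack actually resolved, which can differ from what the user typed. A hedged caller-side sketch follows; the wrapper function and parameter names are illustrative assumptions, only the changed signature comes from this diff.

```go
package example // illustrative sketch, not part of the diff

import (
	"code.gitea.io/gitea/modules/auth/pam"
)

// loginViaPAM authenticates against the given PAM service and returns the
// login name as rewritten by the PAM stack (callers should use this value).
func loginViaPAM(serviceName, login, password string) (string, error) {
	userName, err := pam.Auth(serviceName, login, password)
	if err != nil {
		return "", err
	}
	return userName, nil
}
```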
@@ -1,4 +1,5 @@
 // Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2020 The Gitea Authors. All rights reserved.
 // Use of this source code is governed by a MIT-style
 // license that can be found in the LICENSE file.

@@ -122,7 +123,7 @@ func (ctx *Context) RedirectToFirst(location ...string) {
 		}

 		u, err := url.Parse(loc)
-		if err != nil || (u.Scheme != "" && !strings.HasPrefix(strings.ToLower(loc), strings.ToLower(setting.AppURL))) {
+		if err != nil || ((u.Scheme != "" || u.Host != "") && !strings.HasPrefix(strings.ToLower(loc), strings.ToLower(setting.AppURL))) {
 			continue
 		}
@@ -91,12 +91,12 @@ func (r *Repository) CanUseTimetracker(issue *models.Issue, user *models.User) b
 	// 2. Is the user a contributor, admin, poster or assignee and do the repository policies require this?
 	isAssigned, _ := models.IsUserAssignedToIssue(issue, user)
 	return r.Repository.IsTimetrackerEnabled() && (!r.Repository.AllowOnlyContributorsToTrackTime() ||
-		r.Permission.CanWrite(models.UnitTypeIssues) || issue.IsPoster(user.ID) || isAssigned)
+		r.Permission.CanWriteIssuesOrPulls(issue.IsPull) || issue.IsPoster(user.ID) || isAssigned)
 }

 // CanCreateIssueDependencies returns whether or not a user can create dependencies.
-func (r *Repository) CanCreateIssueDependencies(user *models.User) bool {
-	return r.Permission.CanWrite(models.UnitTypeIssues) && r.Repository.IsDependenciesEnabled()
+func (r *Repository) CanCreateIssueDependencies(user *models.User, isPull bool) bool {
+	return r.Repository.IsDependenciesEnabled() && r.Permission.CanWriteIssuesOrPulls(isPull)
 }

 // GetCommitsCount returns cached commit count for current view
@@ -318,7 +318,7 @@ func (repo *Repository) CommitsBetween(last *Commit, before *Commit) (*list.List
 	var stdout []byte
 	var err error
 	if before == nil {
-		stdout, err = NewCommand("rev-list", before.ID.String()).RunInDirBytes(repo.Path)
+		stdout, err = NewCommand("rev-list", last.ID.String()).RunInDirBytes(repo.Path)
 	} else {
 		stdout, err = NewCommand("rev-list", before.ID.String()+"..."+last.ID.String()).RunInDirBytes(repo.Path)
 	}
@@ -7,10 +7,12 @@ package graceful

 import (
 	"crypto/tls"
+	"io/ioutil"
 	"net"
 	"os"
 	"strings"
 	"sync"
+	"sync/atomic"
 	"syscall"
 	"time"

@@ -99,12 +101,25 @@ func (srv *Server) ListenAndServeTLS(certFile, keyFile string, serve ServeFuncti
 	}

 	config.Certificates = make([]tls.Certificate, 1)
-	var err error
-	config.Certificates[0], err = tls.LoadX509KeyPair(certFile, keyFile)
+	certPEMBlock, err := ioutil.ReadFile(certFile)
 	if err != nil {
 		log.Error("Failed to load https cert file %s for %s:%s: %v", certFile, srv.network, srv.address, err)
 		return err
 	}
+
+	keyPEMBlock, err := ioutil.ReadFile(keyFile)
+	if err != nil {
+		log.Error("Failed to load https key file %s for %s:%s: %v", keyFile, srv.network, srv.address, err)
+		return err
+	}
+
+	config.Certificates[0], err = tls.X509KeyPair(certPEMBlock, keyPEMBlock)
+	if err != nil {
+		log.Error("Failed to create certificate from cert file %s and key file %s for %s:%s: %v", certFile, keyFile, srv.network, srv.address, err)
+		return err
+	}
+
 	return srv.ListenAndServeTLSConfig(config, serve)
 }

@@ -201,9 +216,12 @@ func (wl *wrappedListener) Accept() (net.Conn, error) {
 		}
 	}

+	closed := int32(0)
+
 	c = wrappedConn{
 		Conn:   c,
 		server: wl.server,
+		closed: &closed,
 	}

 	wl.server.wg.Add(1)

@@ -227,12 +245,12 @@ func (wl *wrappedListener) File() (*os.File, error) {
 type wrappedConn struct {
 	net.Conn
 	server *Server
+	closed *int32
 }

 func (w wrappedConn) Close() error {
-	err := w.Conn.Close()
-	if err == nil {
+	if atomic.CompareAndSwapInt32(w.closed, 0, 1) {
 		w.server.wg.Done()
 	}
-	return err
+	return w.Conn.Close()
 }
@@ -116,7 +116,12 @@ func nonGenesisChanges(repo *models.Repository, revision string) (*repoChanges,
 		if len(line) == 0 {
 			continue
 		}
-		filename := strings.TrimSpace(line[1:])
+		fields := strings.Split(line, "\t")
+		if len(fields) < 2 {
+			log.Warn("Unparseable output for diff --name-status: `%s`)", line)
+			continue
+		}
+		filename := fields[1]
 		if len(filename) == 0 {
 			continue
 		} else if filename[0] == '"' {

@@ -126,11 +131,31 @@ func nonGenesisChanges(repo *models.Repository, revision string) (*repoChanges,
 			}
 		}

-		switch status := line[0]; status {
+		switch status := fields[0][0]; status {
 		case 'M', 'A':
 			updatedFilenames = append(updatedFilenames, filename)
 		case 'D':
 			changes.RemovedFilenames = append(changes.RemovedFilenames, filename)
+		case 'R', 'C':
+			if len(fields) < 3 {
+				log.Warn("Unparseable output for diff --name-status: `%s`)", line)
+				continue
+			}
+			dest := fields[2]
+			if len(dest) == 0 {
+				log.Warn("Unparseable output for diff --name-status: `%s`)", line)
+				continue
+			}
+			if dest[0] == '"' {
+				dest, err = strconv.Unquote(dest)
+				if err != nil {
+					return nil, err
+				}
+			}
+			if status == 'R' {
+				changes.RemovedFilenames = append(changes.RemovedFilenames, filename)
+			}
+			updatedFilenames = append(updatedFilenames, dest)
 		default:
 			log.Warn("Unrecognized status: %c (line=%s)", status, line)
 		}
@@ -143,25 +143,23 @@ func InitIssueIndexer(syncReindex bool) {
 	var populate bool
 	switch setting.Indexer.IssueType {
 	case "bleve":
-		graceful.GetManager().RunWithShutdownFns(func(_, atTerminate func(context.Context, func())) {
-			issueIndexer := NewBleveIndexer(setting.Indexer.IssuePath)
-			exist, err := issueIndexer.Init()
-			if err != nil {
-				holder.cancel()
-				log.Fatal("Unable to initialize Bleve Issue Indexer: %v", err)
+		issueIndexer := NewBleveIndexer(setting.Indexer.IssuePath)
+		exist, err := issueIndexer.Init()
+		if err != nil {
+			holder.cancel()
+			log.Fatal("Unable to initialize Bleve Issue Indexer: %v", err)
+		}
+		populate = !exist
+		holder.set(issueIndexer)
+		graceful.GetManager().RunAtTerminate(context.Background(), func() {
+			log.Debug("Closing issue indexer")
+			issueIndexer := holder.get()
+			if issueIndexer != nil {
+				issueIndexer.Close()
 			}
-			populate = !exist
-			holder.set(issueIndexer)
-			atTerminate(context.Background(), func() {
-				log.Debug("Closing issue indexer")
-				issueIndexer := holder.get()
-				if issueIndexer != nil {
-					issueIndexer.Close()
-				}
-				log.Info("PID: %d Issue Indexer closed", os.Getpid())
-			})
-			log.Debug("Created Bleve Indexer")
+			log.Info("PID: %d Issue Indexer closed", os.Getpid())
 		})
+		log.Debug("Created Bleve Indexer")
 	case "db":
 		issueIndexer := &DBIndexer{}
 		holder.set(issueIndexer)
@@ -108,7 +108,7 @@ func (s *linkifyParser) Parse(parent ast.Node, block text.Reader, pc parser.Cont
 		}
 		at := bytes.IndexByte(line, '@')
 		m = []int{0, stop, at, stop - 1}
-		if m == nil || bytes.IndexByte(line[m[2]:m[3]], '.') < 0 {
+		if bytes.IndexByte(line[m[2]:m[3]], '.') < 0 {
 			return nil
 		}
 		lastChar := line[m[1]-1]

@@ -52,7 +52,6 @@ func (g *GiteaASTTransformer) Transform(node *ast.Document, reader text.Reader,

 			lnk := string(link)
 			lnk = giteautil.URLJoin(prefix, lnk)
-			lnk = strings.Replace(lnk, " ", "+", -1)
 			link = []byte(lnk)
 		}
 		v.Destination = link

@@ -79,6 +78,9 @@ func (g *GiteaASTTransformer) Transform(node *ast.Document, reader text.Reader,
 			}
 			link = []byte(giteautil.URLJoin(pc.Get(urlPrefixKey).(string), lnk))
 		}
+		if len(link) > 0 && link[0] == '#' {
+			link = []byte("#user-content-" + string(link)[1:])
+		}
 		v.Destination = link
 	}
 	return ast.WalkContinue, nil
@@ -48,8 +48,9 @@ func RenderRaw(body []byte, urlPrefix string, wikiMarkdown bool) []byte {
 			common.FootnoteExtension,
 			extension.NewTypographer(
 				extension.WithTypographicSubstitutions(extension.TypographicSubstitutions{
 					extension.EnDash: nil,
 					extension.EmDash: nil,
+					extension.Ellipsis: nil,
 				}),
 			),
 		),

@@ -81,7 +81,6 @@ func RenderWiki(filename string, rawBytes []byte, urlPrefix string, metas map[st
 }

 func render(parser Parser, rawBytes []byte, urlPrefix string, metas map[string]string, isWiki bool) []byte {
-	urlPrefix = strings.Replace(urlPrefix, " ", "+", -1)
 	result := parser.Render(rawBytes, urlPrefix, metas, isWiki)
 	// TODO: one day the error should be returned.
 	result, err := PostProcess(result, urlPrefix, metas, isWiki)
@@ -38,7 +38,7 @@ func NewSanitizer() {
 func ReplaceSanitizer() {
 	sanitizer.policy = bluemonday.UGCPolicy()
 	// We only want to allow HighlightJS specific classes for code blocks
-	sanitizer.policy.AllowAttrs("class").Matching(regexp.MustCompile(`^language-\w+$`)).OnElements("code")
+	sanitizer.policy.AllowAttrs("class").Matching(regexp.MustCompile(`^language-[\w-]+$`)).OnElements("code")

 	// Checkboxes
 	sanitizer.policy.AllowAttrs("type").Matching(regexp.MustCompile(`^checkbox$`)).OnElements("input")
@@ -14,6 +14,7 @@ type Comment struct {
 	PosterName string
 	PosterEmail string
 	Created time.Time
+	Updated time.Time
 	Content string
 	Reactions *Reactions
 }

@@ -19,6 +19,7 @@ type Issue struct {
 	State string // closed, open
 	IsLocked bool
 	Created time.Time
+	Updated time.Time
 	Closed *time.Time
 	Labels []*Label
 	Reactions *Reactions

@@ -21,6 +21,7 @@ type PullRequest struct {
 	Milestone string
 	State string
 	Created time.Time
+	Updated time.Time
 	Closed *time.Time
 	Labels []*Label
 	PatchURL string
@@ -332,6 +332,7 @@ func (g *GiteaLocalUploader) CreateIssues(issues ...*base.Issue) error {
 			MilestoneID: milestoneID,
 			Labels: labels,
 			CreatedUnix: timeutil.TimeStamp(issue.Created.Unix()),
+			UpdatedUnix: timeutil.TimeStamp(issue.Updated.Unix()),
 		}

 		userid, ok := g.userMap[issue.PosterID]

@@ -406,6 +407,7 @@ func (g *GiteaLocalUploader) CreateComments(comments ...*base.Comment) error {
 			Type: models.CommentTypeComment,
 			Content: comment.Content,
 			CreatedUnix: timeutil.TimeStamp(comment.Created.Unix()),
+			UpdatedUnix: timeutil.TimeStamp(comment.Updated.Unix()),
 		}

 		if userid > 0 {

@@ -574,6 +576,7 @@ func (g *GiteaLocalUploader) newPullRequest(pr *base.PullRequest) (*models.PullR
 		IsLocked: pr.IsLocked,
 		Labels: labels,
 		CreatedUnix: timeutil.TimeStamp(pr.Created.Unix()),
+		UpdatedUnix: timeutil.TimeStamp(pr.Updated.Unix()),
 	}

 	userid, ok := g.userMap[pr.PosterID]
@@ -24,6 +24,8 @@ import (
 var (
 	_ base.Downloader = &GithubDownloaderV3{}
 	_ base.DownloaderFactory = &GithubDownloaderV3Factory{}
+	// GithubLimitRateRemaining limit to wait for new rate to apply
+	GithubLimitRateRemaining = 0
 )

 func init() {

@@ -115,7 +117,7 @@ func (g *GithubDownloaderV3) SetContext(ctx context.Context) {
 }

 func (g *GithubDownloaderV3) sleep() {
-	for g.rate != nil && g.rate.Remaining <= 0 {
+	for g.rate != nil && g.rate.Remaining <= GithubLimitRateRemaining {
 		timer := time.NewTimer(time.Until(g.rate.Reset.Time))
 		select {
 		case <-g.ctx.Done():

@@ -124,15 +126,24 @@ func (g *GithubDownloaderV3) sleep() {
 		case <-timer.C:
 		}

-		rates, _, err := g.client.RateLimits(g.ctx)
+		err := g.RefreshRate()
 		if err != nil {
 			log.Error("g.client.RateLimits: %s", err)
 		}
-
-		g.rate = rates.GetCore()
 	}
 }

+// RefreshRate update the current rate (doesn't count in rate limit)
+func (g *GithubDownloaderV3) RefreshRate() error {
+	rates, _, err := g.client.RateLimits(g.ctx)
+	if err != nil {
+		return err
+	}
+
+	g.rate = rates.GetCore()
+	return nil
+}
+
 // GetRepoInfo returns a repository information
 func (g *GithubDownloaderV3) GetRepoInfo() (*base.Repository, error) {
 	g.sleep()

@@ -385,6 +396,7 @@ func (g *GithubDownloaderV3) GetIssues(page, perPage int) ([]*base.Issue, bool,
 			Milestone: milestone,
 			State: *issue.State,
 			Created: *issue.CreatedAt,
+			Updated: *issue.UpdatedAt,
 			Labels: labels,
 			Reactions: reactions,
 			Closed: issue.ClosedAt,

@@ -428,6 +440,7 @@ func (g *GithubDownloaderV3) GetComments(issueNumber int64) ([]*base.Comment, er
 			PosterEmail: email,
 			Content: *comment.Body,
 			Created: *comment.CreatedAt,
+			Updated: *comment.UpdatedAt,
 			Reactions: reactions,
 		})
 	}

@@ -523,6 +536,7 @@ func (g *GithubDownloaderV3) GetPullRequests(page, perPage int) ([]*base.PullReq
 			Milestone: milestone,
 			State: *pr.State,
 			Created: *pr.CreatedAt,
+			Updated: *pr.UpdatedAt,
 			Closed: pr.ClosedAt,
 			Labels: labels,
 			Merged: merged,
@@ -6,6 +6,7 @@
 package migrations

 import (
+	"os"
 	"testing"
 	"time"

@@ -62,7 +63,11 @@ func assertLabelEqual(t *testing.T, name, color, description string, label *base
 }

 func TestGitHubDownloadRepo(t *testing.T) {
-	downloader := NewGithubDownloaderV3("", "", "go-gitea", "test_repo")
+	GithubLimitRateRemaining = 3 //Wait at 3 remaining since we could have 3 CI in //
+	downloader := NewGithubDownloaderV3(os.Getenv("GITHUB_READ_TOKEN"), "", "go-gitea", "test_repo")
+	err := downloader.RefreshRate()
+	assert.NoError(t, err)
+
 	repo, err := downloader.GetRepoInfo()
 	assert.NoError(t, err)
 	assert.EqualValues(t, &base.Repository{

@@ -157,6 +162,7 @@ func TestGitHubDownloadRepo(t *testing.T) {
 			PosterName: "guillep2k",
 			State: "closed",
 			Created: time.Date(2019, 11, 9, 17, 0, 29, 0, time.UTC),
+			Updated: time.Date(2019, 11, 12, 20, 29, 53, 0, time.UTC),
 			Labels: []*base.Label{
 				{
 					Name: "bug",

@@ -189,6 +195,7 @@ func TestGitHubDownloadRepo(t *testing.T) {
 			PosterName: "mrsdizzie",
 			State: "closed",
 			Created: time.Date(2019, 11, 12, 21, 0, 6, 0, time.UTC),
+			Updated: time.Date(2019, 11, 12, 22, 7, 14, 0, time.UTC),
 			Labels: []*base.Label{
 				{
 					Name: "duplicate",

@@ -219,6 +226,7 @@ func TestGitHubDownloadRepo(t *testing.T) {
 			PosterID: 1669571,
 			PosterName: "mrsdizzie",
 			Created: time.Date(2019, 11, 12, 21, 0, 13, 0, time.UTC),
+			Updated: time.Date(2019, 11, 12, 21, 0, 13, 0, time.UTC),
 			Content: "This is a comment",
 			Reactions: &base.Reactions{
 				TotalCount: 1,

@@ -235,6 +243,7 @@ func TestGitHubDownloadRepo(t *testing.T) {
 			PosterID: 1669571,
 			PosterName: "mrsdizzie",
 			Created: time.Date(2019, 11, 12, 22, 7, 14, 0, time.UTC),
+			Updated: time.Date(2019, 11, 12, 22, 7, 14, 0, time.UTC),
 			Content: "A second comment",
 			Reactions: &base.Reactions{
 				TotalCount: 0,

@@ -266,6 +275,7 @@ func TestGitHubDownloadRepo(t *testing.T) {
 			PosterName: "mrsdizzie",
 			State: "closed",
 			Created: time.Date(2019, 11, 12, 21, 21, 43, 0, time.UTC),
+			Updated: time.Date(2019, 11, 12, 21, 39, 28, 0, time.UTC),
 			Labels: []*base.Label{
 				{
 					Name: "documentation",

@@ -302,6 +312,7 @@ func TestGitHubDownloadRepo(t *testing.T) {
 			PosterName: "mrsdizzie",
 			State: "open",
 			Created: time.Date(2019, 11, 12, 21, 54, 18, 0, time.UTC),
+			Updated: time.Date(2020, 1, 4, 11, 30, 1, 0, time.UTC),
 			Labels: []*base.Label{
 				{
 					Name: "bug",
@@ -6,6 +6,7 @@ package indexer
 
 import (
 	"code.gitea.io/gitea/models"
+	"code.gitea.io/gitea/modules/git"
 	code_indexer "code.gitea.io/gitea/modules/indexer/code"
 	issue_indexer "code.gitea.io/gitea/modules/indexer/issues"
 	"code.gitea.io/gitea/modules/log"

@@ -118,7 +119,7 @@ func (r *indexerNotifier) NotifyMigrateRepository(doer *models.User, u *models.U
 }
 
 func (r *indexerNotifier) NotifyPushCommits(pusher *models.User, repo *models.Repository, refName, oldCommitID, newCommitID string, commits *models.PushCommits) {
-	if setting.Indexer.RepoIndexerEnabled && refName == repo.DefaultBranch {
+	if setting.Indexer.RepoIndexerEnabled && refName == git.BranchPrefix+repo.DefaultBranch {
 		code_indexer.UpdateRepoIndexer(repo)
 	}
 }

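Note: the notifier receives the fully qualified ref from the push hook, so comparing it against the bare branch name never matched. A small sketch, assuming git.BranchPrefix is Gitea's "refs/heads/" constant (the values here are examples):

	refName := "refs/heads/master"                            // what NotifyPushCommits receives (example)
	before := refName == repo.DefaultBranch                   // false: "refs/heads/master" != "master"
	after := refName == git.BranchPrefix+repo.DefaultBranch   // true when the default branch is "master"
	_, _ = before, after
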
@@ -689,12 +689,12 @@ func (m *webhookNotifier) NotifyDeleteRef(pusher *models.User, repo *models.Repo
 
 	if err := webhook_module.PrepareWebhooks(repo, models.HookEventDelete, &api.DeletePayload{
 		Ref:        refName,
-		RefType:    "branch",
+		RefType:    refType,
 		PusherType: api.PusherTypeUser,
 		Repo:       apiRepo,
 		Sender:     apiPusher,
 	}); err != nil {
-		log.Error("PrepareWebhooks.(delete branch): %v", err)
+		log.Error("PrepareWebhooks.(delete %s): %v", refType, err)
 	}
 }
 
@@ -25,12 +25,13 @@ func TestChannelQueue(t *testing.T) {
 
 	queue, err := NewChannelQueue(handle,
 		ChannelQueueConfiguration{
-			QueueLength:  20,
-			Workers:      1,
+			QueueLength:  0,
 			MaxWorkers:   10,
 			BlockTimeout: 1 * time.Second,
 			BoostTimeout: 5 * time.Minute,
 			BoostWorkers: 5,
+			Workers:      0,
+			Name:         "TestChannelQueue",
 		}, &testData{})
 	assert.NoError(t, err)
 
@@ -6,6 +6,7 @@ package queue
 
 import (
 	"context"
+	"sync"
 	"time"
 
 	"code.gitea.io/gitea/modules/log"

@@ -33,6 +34,7 @@ type PersistableChannelQueueConfiguration struct {
 type PersistableChannelQueue struct {
 	*ChannelQueue
 	delayedStarter
+	lock   sync.Mutex
 	closed chan struct{}
 }
 
@@ -84,11 +84,12 @@ func NewRedisQueue(handle HandlerFunc, cfg, exemplar interface{}) (Queue, error)
 			boostWorkers:       config.BoostWorkers,
 			maxNumberOfWorkers: config.MaxWorkers,
 		},
 		queueName:  config.QueueName,
 		exemplar:   exemplar,
 		closed:     make(chan struct{}),
-		workers:    config.Workers,
-		name:       config.Name,
+		terminated: make(chan struct{}),
+		workers:    config.Workers,
+		name:       config.Name,
 	}
 	if len(dbs) == 0 {
 		return nil, errors.New("no redis host specified")

@@ -28,7 +28,6 @@ type WrappedQueueConfiguration struct {
 }
 
 type delayedStarter struct {
-	lock       sync.Mutex
 	internal   Queue
 	underlying Type
 	cfg        interface{}

@@ -62,7 +61,6 @@ func (q *delayedStarter) setInternal(atShutdown func(context.Context, func()), h
 		queue, err := NewQueue(q.underlying, handle, q.cfg, exemplar)
 		if err == nil {
 			q.internal = queue
-			q.lock.Unlock()
 			break
 		}
 		if err.Error() != "resource temporarily unavailable" {

@@ -90,6 +88,7 @@ func (q *delayedStarter) setInternal(atShutdown func(context.Context, func()), h
 // WrappedQueue wraps a delayed starting queue
 type WrappedQueue struct {
 	delayedStarter
+	lock     sync.Mutex
 	handle   HandlerFunc
 	exemplar interface{}
 	channel  chan Data

@@ -96,8 +96,8 @@ func (p *WorkerPool) pushBoost(data Data) {
 				p.blockTimeout /= 2
 				p.lock.Unlock()
 			}()
-			p.addWorkers(ctx, boost)
 			p.lock.Unlock()
+			p.addWorkers(ctx, boost)
 			p.dataChan <- data
 		}
 	}

@@ -103,8 +103,8 @@ func UpdateIssuesCommit(doer *models.User, repo *models.Repository, commits []*m
 		refMarked[key] = true
 
 		// FIXME: this kind of condition is all over the code, it should be consolidated in a single place
-		canclose := perm.IsAdmin() || perm.IsOwner() || perm.CanWrite(models.UnitTypeIssues) || refIssue.PosterID == doer.ID
-		cancomment := canclose || perm.CanRead(models.UnitTypeIssues)
+		canclose := perm.IsAdmin() || perm.IsOwner() || perm.CanWriteIssuesOrPulls(refIssue.IsPull) || refIssue.PosterID == doer.ID
+		cancomment := canclose || perm.CanReadIssuesOrPulls(refIssue.IsPull)
 
 		// Don't proceed if the user can't comment
 		if !cancomment {

@@ -137,9 +137,11 @@ func UpdateIssuesCommit(doer *models.User, repo *models.Repository, commits []*m
 				continue
 			}
 		}
-		if err := changeIssueStatus(refRepo, refIssue, doer, ref.Action == references.XRefActionCloses); err != nil {
-			return err
+		close := (ref.Action == references.XRefActionCloses)
+		if close != refIssue.IsClosed {
+			if err := changeIssueStatus(refRepo, refIssue, doer, close); err != nil {
+				return err
+			}
 		}
 	}
 }

@@ -46,7 +46,7 @@ func DeleteRepoFile(repo *models.Repository, doer *models.User, opts *DeleteRepo
 	// If we aren't branching to a new branch, make sure user can commit to the given branch
 	if opts.NewBranch != opts.OldBranch {
 		newBranch, err := repo.GetBranch(opts.NewBranch)
-		if git.IsErrNotExist(err) {
+		if err != nil && !git.IsErrBranchNotExist(err) {
 			return nil, err
 		}
 		if newBranch != nil {

@@ -242,10 +242,30 @@ func (t *TemporaryUploadRepository) CommitTreeWithDate(author, committer *models
 func (t *TemporaryUploadRepository) Push(doer *models.User, commitHash string, branch string) error {
 	// Because calls hooks we need to pass in the environment
 	env := models.PushingEnvironment(doer, t.repo)
+	stdout := &strings.Builder{}
+	stderr := &strings.Builder{}
 
-	if _, err := git.NewCommand("push", t.repo.RepoPath(), strings.TrimSpace(commitHash)+":refs/heads/"+strings.TrimSpace(branch)).RunInDirWithEnv(t.basePath, env); err != nil {
-		log.Error("Unable to push back to repo from temporary repo: %s (%s) Error: %v",
-			t.repo.FullName(), t.basePath, err)
+	if err := git.NewCommand("push", t.repo.RepoPath(), strings.TrimSpace(commitHash)+":refs/heads/"+strings.TrimSpace(branch)).RunInDirTimeoutEnvPipeline(env, -1, t.basePath, stdout, stderr); err != nil {
+		errString := stderr.String()
+		if strings.Contains(errString, "non-fast-forward") {
+			return models.ErrMergePushOutOfDate{
+				StdOut: stdout.String(),
+				StdErr: errString,
+				Err:    err,
+			}
+		} else if strings.Contains(errString, "! [remote rejected]") {
+			log.Error("Unable to push back to repo from temporary repo due to rejection: %s (%s)\nStdout: %s\nStderr: %s\nError: %v",
+				t.repo.FullName(), t.basePath, stdout, errString, err)
+			err := models.ErrPushRejected{
+				StdOut: stdout.String(),
+				StdErr: errString,
+				Err:    err,
+			}
+			err.GenerateMessage()
+			return err
+		}
+		log.Error("Unable to push back to repo from temporary repo: %s (%s)\nStdout: %s\nError: %v",
+			t.repo.FullName(), t.basePath, stdout, err)
 		return fmt.Errorf("Unable to push back to repo from temporary repo: %s (%s) Error: %v",
 			t.repo.FullName(), t.basePath, err)
 	}

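Note: Push now surfaces two typed errors instead of a single generic one. A hedged sketch of how a caller might branch on them; the comments describe intent, not code from this diff:

	// Illustrative caller-side handling of the two error types returned above.
	if err := t.Push(doer, commitHash, branch); err != nil {
		switch err.(type) {
		case models.ErrMergePushOutOfDate:
			// The target branch moved underneath us (non-fast-forward);
			// the operation can be retried against the updated head.
		case models.ErrPushRejected:
			// A hook refused the push; GenerateMessage() has already prepared
			// a message intended for display.
		default:
			// Unexpected failure pushing from the temporary repository.
		}
	}
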
@@ -475,9 +475,18 @@ func PushUpdate(repo *models.Repository, branch string, opts PushUpdateOptions)
 		return err
 	}
 
-	log.Trace("TriggerTask '%s/%s' by %s", repo.Name, branch, pusher.Name)
-
-	go pull_service.AddTestPullRequestTask(pusher, repo.ID, branch, true)
+	if !isDelRef {
+		if err = models.RemoveDeletedBranch(repo.ID, opts.Branch); err != nil {
+			log.Error("models.RemoveDeletedBranch %s/%s failed: %v", repo.ID, opts.Branch, err)
+		}
+
+		log.Trace("TriggerTask '%s/%s' by %s", repo.Name, branch, pusher.Name)
+
+		go pull_service.AddTestPullRequestTask(pusher, repo.ID, branch, true)
+		// close all related pulls
+	} else if err = pull_service.CloseBranchPulls(pusher, repo.ID, branch); err != nil {
+		log.Error("close related pull request failed: %v", err)
+	}
 
 	if err = models.WatchIfAuto(opts.PusherID, repo.ID, true); err != nil {
 		log.Warn("Fail to perform auto watch on user %v for repo %v: %v", opts.PusherID, repo.ID, err)

@@ -524,12 +533,15 @@ func PushUpdates(repo *models.Repository, optsList []*PushUpdateOptions) error {
 			if err = models.RemoveeDeletedBranch(repo.ID, opts.Branch); err != nil {
 				log.Error("models.RemoveDeletedBranch %s/%s failed: %v", repo.ID, opts.Branch, err)
 			}
+
+			log.Trace("TriggerTask '%s/%s' by %s", repo.Name, opts.Branch, pusher.Name)
+
+			go pull_service.AddTestPullRequestTask(pusher, repo.ID, opts.Branch, true)
+			// close all related pulls
+		} else if err = pull_service.CloseBranchPulls(pusher, repo.ID, opts.Branch); err != nil {
+			log.Error("close related pull request failed: %v", err)
 		}
 
-		log.Trace("TriggerTask '%s/%s' by %s", repo.Name, opts.Branch, pusher.Name)
-
-		go pull_service.AddTestPullRequestTask(pusher, repo.ID, opts.Branch, true)
-
 		if err = models.WatchIfAuto(opts.PusherID, repo.ID, true); err != nil {
 			log.Warn("Fail to perform auto watch on user %v for repo %v: %v", opts.PusherID, repo.ID, err)
 		}

@@ -124,7 +124,7 @@ func DBConnStr() (string, error) {
 		if err := os.MkdirAll(path.Dir(Database.Path), os.ModePerm); err != nil {
 			return "", fmt.Errorf("Failed to create directories: %v", err)
 		}
-		connStr = fmt.Sprintf("file:%s?cache=shared&mode=rwc&_busy_timeout=%d", Database.Path, Database.Timeout)
+		connStr = fmt.Sprintf("file:%s?cache=shared&mode=rwc&_busy_timeout=%d&_txlock=immediate", Database.Path, Database.Timeout)
 	default:
 		return "", fmt.Errorf("Unknown database type: %s", Database.Type)
 	}

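Note: the SQLite connection string gains a _txlock=immediate parameter, which in the go-sqlite3 driver starts transactions with BEGIN IMMEDIATE, taking the write lock up front instead of on the first write; together with the existing busy timeout this should reduce "database is locked" failures under concurrent writers. A sketch of the resulting DSN with example values (the path and timeout below are illustrative):

	// Same fmt.Sprintf as above, with example values filled in.
	connStr := fmt.Sprintf("file:%s?cache=shared&mode=rwc&_busy_timeout=%d&_txlock=immediate",
		"data/gitea.db", 500)
	// connStr == "file:data/gitea.db?cache=shared&mode=rwc&_busy_timeout=500&_txlock=immediate"
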
@@ -21,6 +21,8 @@ var (
 		MaxGitDiffLines           int
 		MaxGitDiffLineCharacters  int
 		MaxGitDiffFiles           int
+		VerbosePush               bool
+		VerbosePushDelay          time.Duration
 		GCArgs                    []string `ini:"GC_ARGS" delim:" "`
 		EnableAutoGitWireProtocol bool
 		Timeout                   struct {

@@ -36,6 +38,8 @@ var (
 		MaxGitDiffLines:           1000,
 		MaxGitDiffLineCharacters:  5000,
 		MaxGitDiffFiles:           100,
+		VerbosePush:               true,
+		VerbosePushDelay:          5 * time.Second,
 		GCArgs:                    []string{},
 		EnableAutoGitWireProtocol: true,
 		Timeout: struct {

@@ -7,6 +7,7 @@ package setting
 import (
 	"fmt"
 	"path"
+	"path/filepath"
 	"strconv"
 	"strings"
 	"time"

@@ -44,7 +45,7 @@ func GetQueueSettings(name string) QueueSettings {
 	q := QueueSettings{}
 	sec := Cfg.Section("queue." + name)
 	// DataDir is not directly inheritable
-	q.DataDir = path.Join(Queue.DataDir, name)
+	q.DataDir = filepath.Join(Queue.DataDir, name)
 	// QueueName is not directly inheritable either
 	q.QueueName = name + Queue.QueueName
 	for _, key := range sec.Keys() {

@@ -55,8 +56,8 @@ func GetQueueSettings(name string) QueueSettings {
 			q.QueueName = key.MustString(q.QueueName)
 		}
 	}
-	if !path.IsAbs(q.DataDir) {
-		q.DataDir = path.Join(AppDataPath, q.DataDir)
+	if !filepath.IsAbs(q.DataDir) {
+		q.DataDir = filepath.Join(AppDataPath, q.DataDir)
 	}
 	sec.Key("DATADIR").SetValue(q.DataDir)
 	// The rest are...

@@ -82,8 +83,8 @@ func GetQueueSettings(name string) QueueSettings {
 func NewQueueService() {
 	sec := Cfg.Section("queue")
 	Queue.DataDir = sec.Key("DATADIR").MustString("queues/")
-	if !path.IsAbs(Queue.DataDir) {
-		Queue.DataDir = path.Join(AppDataPath, Queue.DataDir)
+	if !filepath.IsAbs(Queue.DataDir) {
+		Queue.DataDir = filepath.Join(AppDataPath, Queue.DataDir)
 	}
 	Queue.Length = sec.Key("LENGTH").MustInt(20)
 	Queue.BatchLength = sec.Key("BATCH_LENGTH").MustInt(20)

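Note: the queue data directories switch from the path package to path/filepath. path always uses forward slashes and has no notion of OS-specific absolute paths, while filepath follows the host OS, which matters for on-disk locations (notably Windows drive letters). A minimal, self-contained illustration of the standard-library behaviour:

	package main

	import (
		"fmt"
		"path"
		"path/filepath"
	)

	func main() {
		// path is purely slash-separated and ignores drive letters.
		fmt.Println(path.IsAbs(`C:\gitea\data`))     // false on every OS
		fmt.Println(path.Join("data", "queues"))     // "data/queues"

		// filepath uses the host OS separator and absolute-path rules.
		fmt.Println(filepath.IsAbs(`C:\gitea\data`)) // true on Windows, false elsewhere
		fmt.Println(filepath.Join("data", "queues")) // "data\queues" on Windows, "data/queues" elsewhere
	}
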
@@ -554,6 +554,12 @@ func NewContext() {
 		Protocol = HTTPS
 		CertFile = sec.Key("CERT_FILE").String()
 		KeyFile = sec.Key("KEY_FILE").String()
+		if !filepath.IsAbs(CertFile) && len(CertFile) > 0 {
+			CertFile = filepath.Join(CustomPath, CertFile)
+		}
+		if !filepath.IsAbs(KeyFile) && len(KeyFile) > 0 {
+			KeyFile = filepath.Join(CustomPath, KeyFile)
+		}
 	case "fcgi":
 		Protocol = FCGI
 	case "fcgi+unix":

@@ -3,6 +3,7 @@
 // license that can be found in the LICENSE file.
 
 package structs // import "code.gitea.io/gitea/modules/structs"
+
 import (
 	"time"
 )

@@ -0,0 +1,63 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// Use of this source code is governed by a MIT-style
+// license that can be found in the LICENSE file.
+
+package structs
+
+// CommitStatusState holds the state of a Status
+// It can be "pending", "success", "error", "failure", and "warning"
+type CommitStatusState string
+
+const (
+	// CommitStatusPending is for when the Status is Pending
+	CommitStatusPending CommitStatusState = "pending"
+	// CommitStatusSuccess is for when the Status is Success
+	CommitStatusSuccess CommitStatusState = "success"
+	// CommitStatusError is for when the Status is Error
+	CommitStatusError CommitStatusState = "error"
+	// CommitStatusFailure is for when the Status is Failure
+	CommitStatusFailure CommitStatusState = "failure"
+	// CommitStatusWarning is for when the Status is Warning
+	CommitStatusWarning CommitStatusState = "warning"
+)
+
+// NoBetterThan returns true if this State is no better than the given State
+func (css CommitStatusState) NoBetterThan(css2 CommitStatusState) bool {
+	switch css {
+	case CommitStatusError:
+		return true
+	case CommitStatusFailure:
+		return css2 != CommitStatusError
+	case CommitStatusWarning:
+		return css2 != CommitStatusError && css2 != CommitStatusFailure
+	case CommitStatusPending:
+		return css2 != CommitStatusError && css2 != CommitStatusFailure && css2 != CommitStatusWarning
+	default:
+		return css2 != CommitStatusError && css2 != CommitStatusFailure && css2 != CommitStatusWarning && css2 != CommitStatusPending
+	}
+}
+
+// IsPending represents if commit status state is pending
+func (css CommitStatusState) IsPending() bool {
+	return css == CommitStatusPending
+}
+
+// IsSuccess represents if commit status state is success
+func (css CommitStatusState) IsSuccess() bool {
+	return css == CommitStatusSuccess
+}
+
+// IsError represents if commit status state is error
+func (css CommitStatusState) IsError() bool {
+	return css == CommitStatusError
+}
+
+// IsFailure represents if commit status state is failure
+func (css CommitStatusState) IsFailure() bool {
+	return css == CommitStatusFailure
+}
+
+// IsWarning represents if commit status state is warning
+func (css CommitStatusState) IsWarning() bool {
+	return css == CommitStatusWarning
+}

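Note: NoBetterThan effectively orders the states from worst to best as error < failure < warning < pending < success. A short sketch of the kind of aggregation it enables; worstStatus is an illustrative name and not part of the new file:

	// worstStatus returns the most severe state in the list, defaulting to
	// success for an empty list (illustrative helper built on the API above).
	func worstStatus(states []CommitStatusState) CommitStatusState {
		worst := CommitStatusSuccess
		for _, state := range states {
			if state.NoBetterThan(worst) {
				worst = state
			}
		}
		return worst
	}
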
@@ -38,6 +38,7 @@ type RepositoryMeta struct {
 type Issue struct {
 	ID             int64  `json:"id"`
 	URL            string `json:"url"`
+	HTMLURL        string `json:"html_url"`
 	Index          int64  `json:"number"`
 	Poster         *User  `json:"user"`
 	OriginalAuthor string `json:"original_author"`

@@ -82,6 +82,7 @@ func (q *UniqueQueue) AddFunc(id interface{}, fn func()) {
 	idStr := com.ToStr(id)
 	q.table.lock.Lock()
 	if _, ok := q.table.pool[idStr]; ok {
+		q.table.lock.Unlock()
 		return
 	}
 	q.table.pool[idStr] = struct{}{}

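Note: the added Unlock closes a lock leak, since the early return for ids already in the pool previously left q.table.lock held. A defer-based shape would make early returns safe by construction, at the cost of holding the lock for the whole function body; the sketch below is only an illustration of that trade-off, not a suggested change to the patch:

	// Illustrative alternative using defer.
	q.table.lock.Lock()
	defer q.table.lock.Unlock()
	if _, ok := q.table.pool[idStr]; ok {
		return
	}
	q.table.pool[idStr] = struct{}{}
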
Some files were not shown because too many files have changed in this diff.