Compare commits

90 commits: 26e09364b9 ... 209d9f3507

SHA1

209d9f3507
07531cf953
4f8ae2881c
b31c44894e
98820fe4f2
28acd6e262
c8f437b316
563fc65e35
4ac522c8aa
44658b7d2c
471138829b
a7ecb5a8bf
682f613810
5289619383
622ec5c79f
2f34f1ec21
601df4d472
b683b93d16
d297a87f75
f74522a352
0c8a96896f
82e5247a43
a12ea7a7cd
3e48942b1e
6ab5735e90
fe55c78fb1
1b568e284f
03f33a0320
2605e121d4
1ffddf75d6
49d2a9e43c
4e6b43e4bc
5cf57d940f
bbe06fcf17
3d69647e06
c029b1a3bc
e64aa0d9c2
0414e95cfb
8265bece8e
119afd3761
802701acad
46ba658cc2
d83135c204
695fda3dd0
64e2c8f6ff
678c611c3d
9db27d2602
f597c789e1
92589b9b8d
916ec9acab
2e539d5190
149cd865ea
2f6b7ce91a
75417ed070
33c52556a3
e5c26e38f5
5aa6a8288d
9e4be39acb
db326835e6
b97dbf7a9e
bd416d0ba9
6a15395f19
20557c6bdb
c1544754f8
c862cc15c8
c2f2fed57a
cc8c7005a6
b073f7fd6a
1d900bc6a9
8126dadc8d
88e179d5ef
08f1fe5812
7e18a86a3a
070904b531
90053ce279
30b11209d1
b45b87988b
291b1b6a26
82cb19649d
a8f5ad1437
84c3b60a4c
5d3cfbd2ba
b1b90dbb4b
d89003cc1b
c9cd5fc65a
d21b0026c7
5f769ef20d
eaa9b35cf6
9b71369be9
e7d0475e15
151 changed files with 2382 additions and 413 deletions
.forgejo/upgrades/default-app.ini (new file)
@@ -0,0 +1,26 @@
RUN_MODE = prod
WORK_PATH = ${WORK_PATH}

[server]
APP_DATA_PATH = ${WORK_PATH}/data
HTTP_PORT = 3000
SSH_LISTEN_PORT = 2222
LFS_START_SERVER = true

[database]
DB_TYPE = sqlite3
PATH = ${WORK_PATH}/forgejo.db

[log]
MODE = file
LEVEL = debug
ROUTER = file

[log.file]
FILE_NAME = forgejo.log

[security]
INSTALL_LOCK = true

[actions]
ENABLED = true
.forgejo/upgrades/merged-app.ini (new file)
@@ -0,0 +1,28 @@
RUN_MODE = prod
WORK_PATH = ${WORK_PATH}

[server]
APP_DATA_PATH = ${WORK_PATH}/data
HTTP_PORT = 3000
SSH_LISTEN_PORT = 2222
LFS_START_SERVER = true

[database]
DB_TYPE = sqlite3

[log]
MODE = file
LEVEL = debug
ROUTER = file

[log.file]
FILE_NAME = forgejo.log

[security]
INSTALL_LOCK = true

[actions]
ENABLED = true

[storage]
PATH = ${WORK_PATH}/merged
.forgejo/upgrades/misplace-app.ini (new file)
@@ -0,0 +1,55 @@
RUN_MODE = prod
WORK_PATH = ${WORK_PATH}

[server]
APP_DATA_PATH = ${WORK_PATH}/elsewhere
HTTP_PORT = 3000
SSH_LISTEN_PORT = 2222
LFS_START_SERVER = true

[database]
DB_TYPE = sqlite3

[log]
MODE = file
LEVEL = debug
ROUTER = file

[log.file]
FILE_NAME = forgejo.log

[security]
INSTALL_LOCK = true

[actions]
ENABLED = true

[attachment]

[storage.attachments]
PATH = ${WORK_PATH}/data/attachments

[lfs]

[storage.lfs]
PATH = ${WORK_PATH}/data/lfs

[avatar]

[storage.avatars]
PATH = ${WORK_PATH}/data/avatars

[repo-avatar]

[storage.repo-avatars]
PATH = ${WORK_PATH}/data/repo-avatars

[repo-archive]

[storage.repo-archive]
PATH = ${WORK_PATH}/data/repo-archive

[packages]

[storage.packages]
PATH = ${WORK_PATH}/data/packages
.forgejo/upgrades/specific-app.ini (new file)
@@ -0,0 +1,43 @@
RUN_MODE = prod
WORK_PATH = ${WORK_PATH}

[server]
APP_DATA_PATH = ${WORK_PATH}/elsewhere
HTTP_PORT = 3000
SSH_LISTEN_PORT = 2222
LFS_START_SERVER = true

[database]
DB_TYPE = sqlite3

[log]
MODE = file
LEVEL = debug
ROUTER = file

[log.file]
FILE_NAME = forgejo.log

[security]
INSTALL_LOCK = true

[actions]
ENABLED = true

[attachment]
PATH = ${WORK_PATH}/data/attachments

[lfs]
PATH = ${WORK_PATH}/data/lfs

[avatar]
PATH = ${WORK_PATH}/data/avatars

[repo-avatar]
PATH = ${WORK_PATH}/data/repo-avatars

[repo-archive]
PATH = ${WORK_PATH}/data/repo-archive

[packages]
PATH = ${WORK_PATH}/data/packages
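These four `*-app.ini` files are templates rather than ready-to-use configurations: `${WORK_PATH}` is filled in at test time. A minimal sketch of how such a template can be instantiated by hand, mirroring what the `reset()` helper of the `test-upgrade.sh` script below does (the `/tmp/forgejo-upgrades` work path is the one that script uses):

```sh
# Render one of the app.ini templates for a local test run.
# Only WORK_PATH is expanded by envsubst; everything else is copied verbatim.
WORK_PATH=/tmp/forgejo-upgrades/forgejo-work-path
mkdir -p "$WORK_PATH"
WORK_PATH="$WORK_PATH" envsubst \
    < .forgejo/upgrades/default-app.ini \
    > "$WORK_PATH/app.ini"
```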
.forgejo/upgrades/test-upgrade.sh (new executable file)
@@ -0,0 +1,274 @@
#!/bin/bash
# SPDX-License-Identifier: MIT

set -ex

HOST_PORT=0.0.0.0:3000
STORAGE_PATHS="attachments avatars lfs packages repo-archive repo-avatars"
DIR=/tmp/forgejo-upgrades
SELF_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
PS4='${BASH_SOURCE[0]}:$LINENO: ${FUNCNAME[0]}: '

function maybe_sudo() {
    if test $(id -u) != 0 ; then
        SUDO=sudo
    fi
}

function dependencies() {
    if ! which curl daemon > /dev/null ; then
        maybe_sudo
        $SUDO apt-get install -y -qq curl daemon
    fi
}

function build() {
    local version=$1
    local semver=$2

    if ! test -f $DIR/forgejo-$version ; then
        mkdir -p $DIR
        make VERSION=v$version GITEA_VERSION=v$version FORGEJO_VERSION=$semver TAGS='bindata sqlite sqlite_unlock_notify' generate gitea
        mv gitea $DIR/forgejo-$version
    fi
}

function build_all() {
    test -f Makefile
    build 1.20.3-0 5.0.2+0-gitea-1.20.3
    build 1.21.0-0 6.0.0+0-gitea-1.21.0
}

function wait_for() {
    rm -f $DIR/setup-forgejo.out
    success=false
    for delay in 1 1 5 5 15 ; do
        if "$@" >> $DIR/setup-forgejo.out 2>&1 ; then
            success=true
            break
        fi
        cat $DIR/setup-forgejo.out
        echo waiting $delay
        sleep $delay
    done
    if test $success = false ; then
        cat $DIR/setup-forgejo.out
        return 1
    fi
}

function download() {
    local version=$1

    if ! test -f $DIR/forgejo-$version ; then
        mkdir -p $DIR
        wget -O $DIR/forgejo-$version --quiet https://codeberg.org/forgejo/forgejo/releases/download/v$version/forgejo-$version-linux-amd64
        chmod +x $DIR/forgejo-$version
    fi
}

function cleanup_logs() {
    local work_path=$DIR/forgejo-work-path

    rm -f $DIR/*.log
    rm -f $work_path/log/*.log
}

function start() {
    local version=$1

    download $version
    local work_path=$DIR/forgejo-work-path
    daemon --chdir=$DIR --unsafe --env="TERM=$TERM" --env="HOME=$HOME" --env="PATH=$PATH" --pidfile=$DIR/forgejo-pid --errlog=$DIR/forgejo-err.log --output=$DIR/forgejo-out.log -- $DIR/forgejo-$version --config $work_path/app.ini --work-path $work_path
    if ! wait_for grep 'Starting server on' $work_path/log/forgejo.log ; then
        cat $DIR/*.log
        cat $work_path/log/*.log
        return 1
    fi
    create_user $version
    $work_path/forgejo-api http://${HOST_PORT}/api/v1/version
}

function create_user() {
    local version=$1

    local work_path=$DIR/forgejo-work-path

    if test -f $work_path/forgejo-token; then
        return
    fi

    local user=root
    local password=admin1234
    local cli="$DIR/forgejo-$version --config $work_path/app.ini --work-path $work_path"
    $cli admin user create --admin --username "$user" --password "$password" --email "$user@example.com"
    local scopes="--scopes all"
    if echo $version | grep --quiet 1.18. ; then
        scopes=""
    fi
    $cli admin user generate-access-token -u $user --raw $scopes > $work_path/forgejo-token
    ( echo -n 'Authorization: token ' ; cat $work_path/forgejo-token ) > $work_path/forgejo-header
    ( echo "#!/bin/sh" ; echo 'curl -sS -H "Content-Type: application/json" -H @'$work_path/forgejo-header' "$@"' ) > $work_path/forgejo-api && chmod +x $work_path/forgejo-api
}

function stop() {
    if test -f $DIR/forgejo-pid ; then
        local pid=$(cat $DIR/forgejo-pid)
        kill -TERM $pid
        pidwait $pid || true
        for delay in 1 1 2 2 5 5 ; do
            if ! test -f $DIR/forgejo-pid ; then
                break
            fi
            sleep $delay
        done
        ! test -f $DIR/forgejo-pid
    fi
    cleanup_logs
}

function reset() {
    local config=$1
    local work_path=$DIR/forgejo-work-path
    rm -fr $work_path
    mkdir -p $work_path
    WORK_PATH=$work_path envsubst < $SELF_DIR/$config-app.ini > $work_path/app.ini
}

function verify_storage() {
    local work_path=$DIR/forgejo-work-path

    for path in ${STORAGE_PATHS} ; do
        test -d $work_path/data/$path
    done
}

function cleanup_storage() {
    local work_path=$DIR/forgejo-work-path

    for path in ${STORAGE_PATHS} ; do
        rm -fr $work_path/data/$path
    done
}

function test_downgrade_1.20.2_fails() {
    local work_path=$DIR/forgejo-work-path

    echo "================ See also https://codeberg.org/forgejo/forgejo/pulls/1225"

    echo "================ downgrading from 1.20.3-0 to 1.20.2-0 fails"
    stop
    reset default
    start 1.20.3-0
    stop
    download 1.20.2-0
    timeout 60 $DIR/forgejo-1.20.2-0 --config $work_path/app.ini --work-path $work_path || true
    if ! grep --fixed-strings --quiet 'use the newer database' $work_path/log/forgejo.log ; then
        cat $work_path/log/forgejo.log
        return 1
    fi
}

function test_bug_storage_merged() {
    local work_path=$DIR/forgejo-work-path

    echo "================ See also https://codeberg.org/forgejo/forgejo/pulls/1225"

    echo "================ using < 1.20.3-0 and [storage].PATH merge all storage"
    for version in 1.18.5-0 1.19.4-0 1.20.2-0 ; do
        stop
        reset merged
        start $version
        for path in ${STORAGE_PATHS} ; do
            ! test -d $work_path/data/$path
        done
        for path in ${STORAGE_PATHS} ; do
            ! test -d $work_path/merged/$path
        done
        test -d $work_path/merged
    done
    stop

    echo "================ upgrading from 1.20.2-0 with [storage].PATH fails"
    download 1.20.3-0
    timeout 60 $DIR/forgejo-1.20.3-0 --config $work_path/app.ini --work-path $work_path || true
    if ! grep --fixed-strings --quiet '[storage].PATH is set and may create storage issues' $work_path/log/forgejo.log ; then
        cat $work_path/log/forgejo.log
        return 1
    fi
}

function test_bug_storage_misplace() {
    local work_path=$DIR/forgejo-work-path

    echo "================ See also https://codeberg.org/forgejo/forgejo/pulls/1225"

    echo "================ using < 1.20 and conflicting sections misplace storage"
    for version in 1.18.5-0 1.19.4-0 ; do
        stop
        reset misplace
        start $version
        #
        # some storage are where they should be
        #
        test -d $work_path/data/packages
        test -d $work_path/data/repo-archive
        test -d $work_path/data/attachments
        #
        # others are under APP_DATA_PATH
        #
        test -d $work_path/elsewhere/lfs
        test -d $work_path/elsewhere/avatars
        test -d $work_path/elsewhere/repo-avatars
    done

    echo "================ using < 1.20.[12]-0 and conflicting sections ignores [storage.*]"
    for version in 1.20.2-0 ; do
        stop
        reset misplace
        start $version
        for path in ${STORAGE_PATHS} ; do
            test -d $work_path/elsewhere/$path
        done
    done

    stop

    echo "================ upgrading from 1.20.2-0 with conflicting sections fails"
    download 1.20.3-0
    timeout 60 $DIR/forgejo-1.20.3-0 --config $work_path/app.ini --work-path $work_path || true
    for path in ${STORAGE_PATHS} ; do
        if ! grep --fixed-strings --quiet "[storage.$path] may conflict" $work_path/log/forgejo.log ; then
            cat $work_path/log/forgejo.log
            return 1
        fi
    done
}

function test_successful_upgrades() {
    for config in default specific ; do
        echo "================ using $config app.ini"
        reset $config

        for version in 1.18.5-0 1.19.4-0 1.20.2-0 1.20.3-0 1.21.0-0 ; do
            echo "================ run $version"
            cleanup_storage
            start $version
            verify_storage
            stop
        done
    done
}

function test_upgrades() {
    stop
    dependencies
    build_all
    test_successful_upgrades
    test_bug_storage_misplace
    test_bug_storage_merged
    test_downgrade_1.20.2_fails
}

"$@"
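Because the script ends with `"$@"`, every shell function doubles as a subcommand, which is exactly how the workflow change below drives it. A hedged sketch of a local invocation, run from a Forgejo checkout (the `dependencies` step may need sudo; builds and downloads land in `/tmp/forgejo-upgrades`):

```sh
# Install curl/daemon, then run the whole upgrade matrix:
# it builds 1.20.3-0 and 1.21.0-0 and downloads the older releases it tests against.
./.forgejo/upgrades/test-upgrade.sh dependencies
./.forgejo/upgrades/test-upgrade.sh test_upgrades

# Individual scenarios can be invoked the same way:
./.forgejo/upgrades/test-upgrade.sh test_bug_storage_misplace
```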
@@ -14,7 +14,7 @@ jobs:
       - uses: https://code.forgejo.org/actions/checkout@v3
       - uses: https://code.forgejo.org/actions/setup-go@v4
         with:
-          go-version: ">=1.20"
+          go-version: "1.20"
           check-latest: true
       - run: make deps-backend deps-tools
       - run: make lint-backend
@@ -26,7 +26,7 @@ jobs:
       - uses: https://code.forgejo.org/actions/checkout@v3
       - uses: https://code.forgejo.org/actions/setup-go@v4
         with:
-          go-version: ">=1.20"
+          go-version: "1.20"
           check-latest: true
       - run: make deps-backend deps-tools
       - run: make --always-make checks-backend # ensure the "go-licenses" make target runs
@@ -39,7 +39,7 @@ jobs:
       - uses: https://code.forgejo.org/actions/checkout@v3
       - uses: https://code.forgejo.org/actions/setup-go@v4
         with:
-          go-version: ">=1.20.0"
+          go-version: "1.20"
      - run: |
          git config --add safe.directory '*'
          chown -R gitea:gitea . /go
@@ -76,7 +76,7 @@ jobs:
       - uses: https://code.forgejo.org/actions/checkout@v3
       - uses: https://code.forgejo.org/actions/setup-go@v4
         with:
-          go-version: ">=1.20.0"
+          go-version: "1.20"
      - run: |
          git config --add safe.directory '*'
          chown -R gitea:gitea . /go
@@ -109,7 +109,7 @@ jobs:
       - uses: https://code.forgejo.org/actions/checkout@v3
       - uses: https://code.forgejo.org/actions/setup-go@v4
         with:
-          go-version: ">=1.20.0"
+          go-version: "1.20"
      - run: |
          git config --add safe.directory '*'
          chown -R gitea:gitea . /go
@@ -136,7 +136,7 @@ jobs:
       - uses: https://code.forgejo.org/actions/checkout@v3
       - uses: https://code.forgejo.org/actions/setup-go@v4
         with:
-          go-version: ">=1.20.0"
+          go-version: "1.20"
      - run: |
          git config --add safe.directory '*'
          chown -R gitea:gitea . /go
@@ -154,3 +154,33 @@ jobs:
           RACE_ENABLED: true
           TEST_TAGS: gogit sqlite sqlite_unlock_notify
           USE_REPO_TEST_DIR: 1
+
+  upgrade:
+    needs: [test-sqlite]
+    runs-on: docker
+    container:
+      image: codeberg.org/forgejo/test_env:main
+    steps:
+      - uses: https://code.forgejo.org/actions/checkout@v3
+      - uses: https://code.forgejo.org/actions/setup-go@v4
+        with:
+          go-version: "1.20"
+      - run: |
+          git config --add safe.directory '*'
+          chown -R gitea:gitea . /go
+      - run: |
+          su gitea -c 'make deps-backend'
+      - run: |
+          su gitea -c 'make backend'
+        env:
+          TAGS: bindata sqlite sqlite_unlock_notify
+      - run: |
+          su gitea -c 'make gitea'
+          cp -a gitea /tmp/forgejo-development
+        timeout-minutes: 50
+        env:
+          TAGS: bindata sqlite sqlite_unlock_notify
+      - run: |
+          script=$(pwd)/.forgejo/upgrades/test-upgrade.sh
+          $script dependencies
+          su gitea -c "$script test_upgrades"
CHANGELOG.md (66 lines changed)
@@ -2,9 +2,69 @@

 This changelog goes through all the changes that have been made in each release
 without substantial changes to our git log; to see the highlights of what has
-been added to each release, please refer to the [blog](https://blog.gitea.io).
+been added to each release, please refer to the [blog](https://blog.gitea.com).

-## [1.20.2](https://github.com/go-gitea/gitea/releases/tag/1.20.2) - 2023-07-29
+## [1.20.3](https://github.com/go-gitea/gitea/releases/tag/v1.20.3) - 2023-08-20
+
+* BREAKING
+  * Fix the wrong derive path (#26271) (#26318)
+* SECURITY
+  * Fix API leaking Usermail if not logged in (#25097) (#26350)
+* FEATURES
+  * Add ThreadID parameter for Telegram webhooks (#25996) (#26480)
+* ENHANCEMENTS
+  * Add minimum polyfill to support "relative-time-element" in PaleMoon (#26575) (#26578)
+  * Fix dark theme highlight for "NameNamespace" (#26519) (#26527)
+  * Detect ogg mime-type as audio or video (#26494) (#26505)
+  * Use `object-fit: contain` for oauth2 custom icons (#26493) (#26498)
+  * Move dropzone progress bar to bottom to show filename when uploading (#26492) (#26497)
+  * Remove last newline from config file (#26468) (#26471)
+  * Minio: add missing region on client initialization (#26412) (#26438)
+  * Add pull request review request webhook event (#26401) (#26407)
+  * Fix text truncate (#26354) (#26384)
+  * Fix incorrect color of selected assignees when create issue (#26324) (#26372)
+  * Display human-readable text instead of cryptic filemodes (#26352) (#26358)
+  * Hide `last indexed SHA` when a repo could not be indexed yet (#26340) (#26345)
+  * Fix the topic validation rule and suport dots (#26286) (#26303)
+  * Fix due date rendering the wrong date in issue (#26268) (#26274)
+  * Don't autosize textarea in diff view (#26233) (#26244)
+  * Fix commit compare style (#26209) (#26226)
+  * Warn instead of reporting an error when a webhook cannot be found (#26039) (#26211)
+* BUGFIXES
+  * Use "input" event instead of "keyup" event for migration form (#26602) (#26605)
+  * Do not use deprecated log config options by default (#26592) (#26600)
+  * Fix "issueReposQueryPattern does not match query" (#26556) (#26564)
+  * Sync repo's IsEmpty status correctly (#26517) (#26560)
+  * Fix project filter bugs (#26490) (#26558)
+  * Use `hidden` over `clip` for text truncation (#26520) (#26522)
+  * Set "type=button" for editor's toolbar buttons (#26510) (#26518)
+  * Fix NuGet search endpoints (#25613) (#26499)
+  * Fix storage path logic especially for relative paths (#26441) (#26481)
+  * Close stdout correctly for "git blame" (#26470) (#26473)
+  * Check first if minio bucket exists before trying to create it (#26420) (#26465)
+  * Avoiding accessing undefined tributeValues #26461 (#26462)
+  * Call git.InitSimple for runRepoSyncReleases (#26396) (#26450)
+  * Add transaction when creating pull request created dirty data (#26259) (#26437)
+  * Fix wrong middleware sequence (#26428) (#26436)
+  * Fix admin queue page title and fix CI failures (#26409) (#26421)
+  * Introduce ctx.PathParamRaw to avoid incorrect unescaping (#26392) (#26405)
+  * Bypass MariaDB performance bug of the "IN" sub-query, fix incorrect IssueIndex (#26279) (#26368)
+  * Fix incorrect CLI exit code and duplicate error message (#26346) (#26347)
+  * Prevent newline errors with Debian packages (#26332) (#26342)
+  * Fix bug with sqlite load read (#26305) (#26339)
+  * Make git batch operations use parent context timeout instead of default timeout (#26325) (#26330)
+  * Support getting changed files when commit ID is `EmptySHA` (#26290) (#26316)
+  * Clarify the logger's MODE config option (#26267) (#26281)
+  * Use shared template for webhook icons (#26242) (#26246)
+  * Fix pull request check list is limited (#26179) (#26245)
+  * Fix attachment clipboard copy on insecure origin (#26224) (#26231)
+  * Fix access check for org-level project (#26182) (#26223)
+* MISC
+  * Improve profile readme rendering (#25988) (#26453)
+  * [docs] Add missing backtick in quickstart.zh-cn.md (#26349) (#26357)
+  * Upgrade x/net to 0.13.0 (#26301)
+
+## [1.20.2](https://github.com/go-gitea/gitea/releases/tag/v1.20.2) - 2023-07-29
+
 * ENHANCEMENTS
   * Calculate MAX_WORKERS default value by CPU number (#26177) (#26183)
@@ -32,7 +92,7 @@ been added to each release, please refer to the [blog](https://blog.gitea.io).
   * Fix wrong workflow status when rerun a job in an already finished workflow (#26119) (#26124)
   * Fix duplicated url prefix on issue context menu (#26066) (#26067)

-## [1.20.1](https://github.com/go-gitea/gitea/releases/tag/1.20.1) - 2023-07-22
+## [1.20.1](https://github.com/go-gitea/gitea/releases/tag/v1.20.1) - 2023-07-22

 * SECURITY
   * Disallow dangerous URL schemes (#25960) (#25964)
Makefile (4 lines changed)
@@ -89,9 +89,9 @@ endif
 VERSION = ${GITEA_VERSION}

 # SemVer
-FORGEJO_VERSION := 5.0.1+0-gitea-1.20.2
+FORGEJO_VERSION := 5.0.2+0-gitea-1.20.3

-LDFLAGS := $(LDFLAGS) -X "main.MakeVersion=$(MAKE_VERSION)" -X "main.Version=$(GITEA_VERSION)" -X "main.Tags=$(TAGS)" -X "code.gitea.io/gitea/routers/api/forgejo/v1.ForgejoVersion=$(FORGEJO_VERSION)"
+LDFLAGS := $(LDFLAGS) -X "main.MakeVersion=$(MAKE_VERSION)" -X "main.Version=$(GITEA_VERSION)" -X "main.Tags=$(TAGS)" -X "code.gitea.io/gitea/routers/api/forgejo/v1.ForgejoVersion=$(FORGEJO_VERSION)" -X "main.ForgejoVersion=$(FORGEJO_VERSION)"

 LINUX_ARCHS ?= linux/amd64,linux/386,linux/arm-5,linux/arm-6,linux/arm64
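The new `-X "main.ForgejoVersion=$(FORGEJO_VERSION)"` flag embeds the Forgejo SemVer into the binary in addition to the Gitea version. A hedged sketch of how a running test instance can be checked, using the `forgejo-api` helper that `create_user()` in the test script writes (exact JSON fields depend on the version that is running):

```sh
# Query the version reported by an instance started by test-upgrade.sh.
WORK_PATH=/tmp/forgejo-upgrades/forgejo-work-path
"$WORK_PATH/forgejo-api" http://0.0.0.0:3000/api/v1/version

# Or ask the downloaded/built binary directly:
/tmp/forgejo-upgrades/forgejo-1.20.3-0 --version
```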
@@ -348,6 +348,10 @@ func runRepoSyncReleases(_ *cli.Context) error {
 		return err
 	}

+	if err := git.InitSimple(ctx); err != nil {
+		return err
+	}
+
 	log.Trace("Synchronizing repository releases (this may take a while)")
 	for page := 1; ; page++ {
 		repos, count, err := repo_model.SearchRepositoryByName(ctx, &repo_model.SearchRepoOptions{
cmd/main.go (new file)
@@ -0,0 +1,26 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package cmd

import (
	"fmt"
	"strings"

	"github.com/urfave/cli"
)

func RunMainApp(app *cli.App, args ...string) error {
	err := app.Run(args)
	if err == nil {
		return nil
	}
	if strings.HasPrefix(err.Error(), "flag provided but not defined:") {
		// the cli package should already have output the error message, so just exit
		cli.OsExiter(1)
		return err
	}
	_, _ = fmt.Fprintf(app.ErrWriter, "Command error: %v\n", err)
	cli.OsExiter(1)
	return err
}
@@ -4,9 +4,16 @@
 package cmd

 import (
+	"fmt"
+	"io"
+	"strings"
 	"testing"

 	"code.gitea.io/gitea/models/unittest"
+	"code.gitea.io/gitea/modules/test"
+
+	"github.com/stretchr/testify/assert"
+	"github.com/urfave/cli"
 )

 func TestMain(m *testing.M) {
@@ -14,3 +21,64 @@ func TestMain(m *testing.M) {
 		GiteaRootPath: "..",
 	})
 }
+
+func newTestApp(testCmdAction func(ctx *cli.Context) error) *cli.App {
+	app := cli.NewApp()
+	app.HelpName = "gitea"
+	testCmd := cli.Command{Name: "test-cmd", Action: testCmdAction}
+	app.Commands = append(app.Commands, testCmd)
+	return app
+}
+
+type runResult struct {
+	Stdout   string
+	Stderr   string
+	ExitCode int
+}
+
+func runTestApp(app *cli.App, args ...string) (runResult, error) {
+	outBuf := new(strings.Builder)
+	errBuf := new(strings.Builder)
+	app.Writer = outBuf
+	app.ErrWriter = errBuf
+	exitCode := -1
+	defer test.MockVariableValue(&cli.ErrWriter, app.ErrWriter)()
+	defer test.MockVariableValue(&cli.OsExiter, func(code int) {
+		if exitCode == -1 {
+			exitCode = code // save the exit code once and then reset the writer (to simulate the exit)
+			app.Writer, app.ErrWriter, cli.ErrWriter = io.Discard, io.Discard, io.Discard
+		}
+	})()
+	err := RunMainApp(app, args...)
+	return runResult{outBuf.String(), errBuf.String(), exitCode}, err
+}
+
+func TestCliCmdError(t *testing.T) {
+	app := newTestApp(func(ctx *cli.Context) error { return fmt.Errorf("normal error") })
+	r, err := runTestApp(app, "./gitea", "test-cmd")
+	assert.Error(t, err)
+	assert.Equal(t, 1, r.ExitCode)
+	assert.Equal(t, "", r.Stdout)
+	assert.Equal(t, "Command error: normal error\n", r.Stderr)
+
+	app = newTestApp(func(ctx *cli.Context) error { return cli.NewExitError("exit error", 2) })
+	r, err = runTestApp(app, "./gitea", "test-cmd")
+	assert.Error(t, err)
+	assert.Equal(t, 2, r.ExitCode)
+	assert.Equal(t, "", r.Stdout)
+	assert.Equal(t, "exit error\n", r.Stderr)
+
+	app = newTestApp(func(ctx *cli.Context) error { return nil })
+	r, err = runTestApp(app, "./gitea", "test-cmd", "--no-such")
+	assert.Error(t, err)
+	assert.Equal(t, 1, r.ExitCode)
+	assert.EqualValues(t, "Incorrect Usage: flag provided but not defined: -no-such\n\nNAME:\n gitea test-cmd - \n\nUSAGE:\n gitea test-cmd [arguments...]\n", r.Stdout)
+	assert.Equal(t, "", r.Stderr) // the cli package's strange behavior, the error message is not in stderr ....
+
+	app = newTestApp(func(ctx *cli.Context) error { return nil })
+	r, err = runTestApp(app, "./gitea", "test-cmd")
+	assert.NoError(t, err)
+	assert.Equal(t, -1, r.ExitCode) // the cli.OsExiter is not called
+	assert.Equal(t, "", r.Stdout)
+	assert.Equal(t, "", r.Stderr)
+}
@@ -46,7 +46,6 @@ PATH = /data/gitea/attachments
 [log]
 MODE = console
 LEVEL = info
-ROUTER = console
 ROOT_PATH = /data/gitea/log

 [security]
@@ -79,8 +79,9 @@ SMTP_PORT = 465
 FROM = example.user@gmail.com
 USER = example.user
 PASSWD = `***`
-PROTOCOL = smtp
-IS_TLS_ENABLED = true
+PROTOCOL = smtps ; Gitea >= 1.19.0
+; PROTOCOL = smtp ; Gitea < 1.19.0
+; IS_TLS_ENABLED = true ; Gitea < 1.19.0
 ```

 Note that you'll need to create and use an [App password](https://support.google.com/accounts/answer/185833?hl=en) by enabling 2FA on your Google
@@ -102,8 +102,11 @@ MODE = file, file-error

 ; by default, the "file" mode will record logs to %(log.ROOT_PATH)/gitea.log, so we don't need to set it
 ; [log.file]
+; by default, the MODE (actually it's the output writer of this logger) is taken from the section name, so we don't need to set it either
+; MODE = file

 [log.file-error]
+MODE = file
 LEVEL = Error
 FILE_NAME = file-error.log
 ```
@@ -40,7 +40,7 @@ apk add gitea

 ## Arch Linux

-The rolling release distribution has [Gitea](https://www.archlinux.org/packages/community/x86_64/gitea/) in their official community repository and package updates are provided with new Gitea releases.
+The rolling release distribution has [Gitea](https://www.archlinux.org/packages/extra/x86_64/gitea/) in their official extra repository and package updates are provided with new Gitea releases.

 ```sh
 pacman -S gitea
@@ -17,16 +17,20 @@ menu:

 # Upgrade from an old Gitea

-To update Gitea, download a newer version, stop the old one, perform a backup, and run the new one.
-Every time a Gitea instance starts up, it checks whether a database migration should be run.
-If a database migration is required, Gitea will take some time to complete the upgrade and then serve.
+Follow the steps below to ensure a smooth upgrade to a new Gitea version.

 ## Check the Changelog for breaking changes

 To make Gitea better, some breaking changes are unavoidable, especially for big milestone releases.
-Before upgrade, please read the [Changelog on Gitea blog](https://blog.gitea.io/)
+Before upgrading, please read the [Changelog on Gitea blog](https://blog.gitea.com/)
 and check whether the breaking changes affect your Gitea instance.

+## Verify there are no deprecated configuration options
+
+New versions of Gitea often come with changed configuration syntax or options which are usually displayed for
+at least one release cycle at the top of the Site Administration panel. If these warnings are not
+resolved, Gitea may refuse to start in the following version.
+
 ## Backup for downgrade

 Gitea keeps compatibility for patch versions whose first two fields are the same (`a.b.x` -> `a.b.y`),
@@ -60,6 +64,11 @@ Backup steps:
 If you are using cloud services or filesystems with snapshot feature,
 a snapshot for the Gitea data volume and related object storage is more convenient.

+After all of the steps have been prepared, download the new version, stop the application, perform a backup and
+then start the new application. On each startup, Gitea verifies that the database is up to date and will automatically
+perform any necessary migrations. Depending on the size of the database, this can take some additional time on the
+first launch during which the application will be unavailable.
+
 ## Upgrade with Docker

 * `docker pull` the latest Gitea release.
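The added paragraph summarizes the recommended order of operations. A minimal sketch of that sequence for a binary installation, assuming a systemd service named `gitea` and using `gitea dump` for the backup; the download URL, user, and paths are illustrative, not taken from the docs:

```sh
# 1. download the new release, 2. stop, 3. back up, 4. start the new version
wget -O /tmp/gitea-new https://dl.gitea.com/gitea/1.20.3/gitea-1.20.3-linux-amd64
chmod +x /tmp/gitea-new
systemctl stop gitea
su git -c 'gitea dump -c /etc/gitea/app.ini'   # backup before replacing the binary
install -m 755 /tmp/gitea-new /usr/local/bin/gitea
systemctl start gitea                          # migrations run on the first startup
```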
@@ -15,16 +15,20 @@ menu:

 # 从旧版 Gitea 升级

-想要升级 Gitea,只需要下载新版,停止运行旧版,进行数据备份,然后运行新版就好。
-每次 Gitea 实例启动时,它都会检查是否要进行数据库迁移。
-如果需要进行数据库迁移,Gitea 会花一些时间完成升级然后继续服务。
+在升级之前,您需要做如下的准备工作。

 ## 为重大变更检查更新日志

 为了让 Gitea 变得更好,进行重大变更是不可避免的,尤其是一些里程碑更新的发布。
-在更新前,请 [在 Gitea 博客上阅读更新日志](https://blog.gitea.io/)
+在更新前,请 [在 Gitea 博客上阅读更新日志](https://blog.gitea.com/)
 并检查重大变更是否会影响你的 Gitea 实例。

+## 在控制面板中检查过期的配置项
+
+一些配置项可能会在后续版本中过期,你需要在控制面板中检查他们。如果不解决过期的配置项,
+Gitea也许会在升级后无法重启。你可以访问 https://docs.gitea.com 获得要升级的版本
+对应的文档来修改你的配置文件。
+
 ## 降级前的备份

 Gitea 会保留首二位版本号相同的版本的兼容性 (`a.b.x` -> `a.b.y`),
@@ -56,6 +60,10 @@ Gitea 会保留首二位版本号相同的版本的兼容性 (`a.b.x` -> `a.b.y`
 如果你在使用云服务或拥有快照功能的文件系统,
 最好对 Gitea 的数据盘及相关资料存储进行一次快照。

+在所有上述步骤准备妥当之后,要升级 Gitea,只需要下载新版,停止运行旧版,进行数据备份,然后运行新版就好。
+每次 Gitea 实例启动时,它都会检查是否要进行数据库迁移。
+如果需要进行数据库迁移,Gitea 会花一些时间完成升级然后继续服务。
+
 ## 从 Docker 升级

 * `docker pull` 拉取 Gitea 的最新发布版。
@@ -123,7 +123,7 @@ jobs:

 请注意,演示文件中包含一些表情符号。
 请确保您的数据库支持它们,特别是在使用MySQL时。
-如果字符集不是`utf8mb4,将出现错误,例如`Error 1366 (HY000): Incorrect string value: '\\xF0\\x9F\\x8E\\x89 T...' for column 'name' at row 1`。
+如果字符集不是`utf8mb4`,将出现错误,例如`Error 1366 (HY000): Incorrect string value: '\\xF0\\x9F\\x8E\\x89 T...' for column 'name' at row 1`。
 有关更多信息,请参阅[数据库准备工作](installation/database-preparation.md#mysql)。

 或者,您可以从演示文件中删除所有表情符号,然后再尝试一次。
@@ -21,8 +21,8 @@ In Gitea `1.13`, support for [agit](https://git-repo.info/en/2020/03/agit-flow-a

 ## Creating PRs with Agit

-Agit allows to create PRs while pushing code to the remote repo. \
-This can be done by pushing to the branch followed by a specific refspec (a location identifier known to git). \
+Agit allows to create PRs while pushing code to the remote repo.
+This can be done by pushing to the branch followed by a specific refspec (a location identifier known to git).
 The following example illustrates this:

 ```shell
go.mod (4 lines changed)
@@ -104,12 +104,12 @@ require (
 	github.com/xanzy/go-gitlab v0.83.0
 	github.com/xeipuuv/gojsonschema v1.2.0
 	github.com/yohcop/openid-go v1.0.1
-	github.com/yuin/goldmark v1.5.4
+	github.com/yuin/goldmark v1.5.5
 	github.com/yuin/goldmark-highlighting/v2 v2.0.0-20220924101305-151362477c87
 	github.com/yuin/goldmark-meta v1.1.0
 	golang.org/x/crypto v0.11.0
 	golang.org/x/image v0.7.0
-	golang.org/x/net v0.12.0
+	golang.org/x/net v0.13.0
 	golang.org/x/oauth2 v0.8.0
 	golang.org/x/sys v0.10.0
 	golang.org/x/text v0.11.0
go.sum (8 lines changed)
@@ -1098,8 +1098,8 @@ github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9de
 github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
 github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
 github.com/yuin/goldmark v1.4.15/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
-github.com/yuin/goldmark v1.5.4 h1:2uY/xC0roWy8IBEGLgB1ywIoEJFGmRrX21YQcvGZzjU=
-github.com/yuin/goldmark v1.5.4/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
+github.com/yuin/goldmark v1.5.5 h1:IJznPe8wOzfIKETmMkd06F8nXkmlhaHqFRM9l1hAGsU=
+github.com/yuin/goldmark v1.5.5/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
 github.com/yuin/goldmark-highlighting/v2 v2.0.0-20220924101305-151362477c87 h1:Py16JEzkSdKAtEFJjiaYLYBOWGXc1r/xHj/Q/5lA37k=
 github.com/yuin/goldmark-highlighting/v2 v2.0.0-20220924101305-151362477c87/go.mod h1:ovIvrum6DQJA4QsJSovrkC4saKHQVs7TvcaeO8AIl5I=
 github.com/yuin/goldmark-meta v1.1.0 h1:pWw+JLHGZe8Rk0EGsMVssiNb/AaPMHfSRszZeUeiOUc=
@@ -1269,8 +1269,8 @@ golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
 golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
 golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc=
 golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns=
-golang.org/x/net v0.12.0 h1:cfawfvKITfUsFCeJIHJrbSxpeu/E81khclypR0GVT50=
-golang.org/x/net v0.12.0/go.mod h1:zEVYFnQC7m/vmpQFELhcD1EWkZlX69l4oqgmer6hfKA=
+golang.org/x/net v0.13.0 h1:Nvo8UFsZ8X3BhAC9699Z1j7XQ3rsZnUUm7jfBEk1ueY=
+golang.org/x/net v0.13.0/go.mod h1:zEVYFnQC7m/vmpQFELhcD1EWkZlX69l4oqgmer6hfKA=
 golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
 golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
 golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
main.go (11 lines changed)
@@ -38,8 +38,11 @@ var (
 	MakeVersion = ""
 )

+var ForgejoVersion = "1.0.0"
+
 func init() {
 	setting.AppVer = Version
+	setting.ForgejoVersion = ForgejoVersion
 	setting.AppBuiltWith = formatBuiltWith()
 	setting.AppStartTime = time.Now().UTC()
 }
@@ -192,11 +195,13 @@ argument - which can alternatively be run by running the subcommand web.`
 	app.Commands = append(app.Commands, subCmdWithIni...)
 	app.Commands = append(app.Commands, subCmdStandalone...)

-	err := app.Run(os.Args)
-	if err != nil {
-		_, _ = fmt.Fprintf(app.Writer, "\nFailed to run with %s: %v\n", os.Args, err)
+	cli.OsExiter = func(code int) {
+		log.GetManager().Close()
+		os.Exit(code)
 	}
+	app.ErrWriter = os.Stderr

+	_ = cmd.RunMainApp(app, os.Args...) // all errors should have been handled by the RunMainApp
 	log.GetManager().Close()
 }
@@ -281,7 +281,7 @@ func CreateTaskForRunner(ctx context.Context, runner *ActionRunner) (*ActionTask
 	if gots, err := jobparser.Parse(job.WorkflowPayload); err != nil {
 		return nil, false, fmt.Errorf("parse workflow of job %d: %w", job.ID, err)
 	} else if len(gots) != 1 {
-		return nil, false, fmt.Errorf("workflow of job %d: not signle workflow", job.ID)
+		return nil, false, fmt.Errorf("workflow of job %d: not single workflow", job.ID)
 	} else {
 		_, workflowJob = gots[0].Job()
 	}
@@ -685,18 +685,34 @@ func NotifyWatchersActions(acts []*Action) error {
 }

 // DeleteIssueActions delete all actions related with issueID
-func DeleteIssueActions(ctx context.Context, repoID, issueID int64) error {
+func DeleteIssueActions(ctx context.Context, repoID, issueID, issueIndex int64) error {
 	// delete actions assigned to this issue
-	subQuery := builder.Select("`id`").
-		From("`comment`").
-		Where(builder.Eq{"`issue_id`": issueID})
-	if _, err := db.GetEngine(ctx).In("comment_id", subQuery).Delete(&Action{}); err != nil {
+	e := db.GetEngine(ctx)
+
+	// MariaDB has a performance bug: https://jira.mariadb.org/browse/MDEV-16289
+	// so here it uses "DELETE ... WHERE IN" with pre-queried IDs.
+	var lastCommentID int64
+	commentIDs := make([]int64, 0, db.DefaultMaxInSize)
+	for {
+		commentIDs = commentIDs[:0]
+		err := e.Select("`id`").Table(&issues_model.Comment{}).
+			Where(builder.Eq{"issue_id": issueID}).And("`id` > ?", lastCommentID).
+			OrderBy("`id`").Limit(db.DefaultMaxInSize).
+			Find(&commentIDs)
+		if err != nil {
 			return err
+		} else if len(commentIDs) == 0 {
+			break
+		} else if _, err = db.GetEngine(ctx).In("comment_id", commentIDs).Delete(&Action{}); err != nil {
+			return err
+		} else {
+			lastCommentID = commentIDs[len(commentIDs)-1]
+		}
 	}

-	_, err := db.GetEngine(ctx).Table("action").Where("repo_id = ?", repoID).
+	_, err := e.Where("repo_id = ?", repoID).
 		In("op_type", ActionCreateIssue, ActionCreatePullRequest).
-		Where("content LIKE ?", strconv.FormatInt(issueID, 10)+"|%").
+		Where("content LIKE ?", strconv.FormatInt(issueIndex, 10)+"|%"). // "IssueIndex|content..."
 		Delete(&Action{})
 	return err
 }
@@ -4,6 +4,7 @@
 package activities_test

 import (
+	"fmt"
 	"path"
 	"testing"

@@ -284,3 +285,36 @@ func TestConsistencyUpdateAction(t *testing.T) {
 	assert.NoError(t, db.GetEngine(db.DefaultContext).Where("id = ?", id).Find(&actions))
 	unittest.CheckConsistencyFor(t, &activities_model.Action{})
 }
+
+func TestDeleteIssueActions(t *testing.T) {
+	assert.NoError(t, unittest.PrepareTestDatabase())
+
+	// load an issue
+	issue := unittest.AssertExistsAndLoadBean(t, &issue_model.Issue{ID: 4})
+	assert.NotEqualValues(t, issue.ID, issue.Index) // it needs to use different ID/Index to test the DeleteIssueActions to delete some actions by IssueIndex
+
+	// insert a comment
+	err := db.Insert(db.DefaultContext, &issue_model.Comment{Type: issue_model.CommentTypeComment, IssueID: issue.ID})
+	assert.NoError(t, err)
+	comment := unittest.AssertExistsAndLoadBean(t, &issue_model.Comment{Type: issue_model.CommentTypeComment, IssueID: issue.ID})
+
+	// truncate action table and insert some actions
+	err = db.TruncateBeans(db.DefaultContext, &activities_model.Action{})
+	assert.NoError(t, err)
+	err = db.Insert(db.DefaultContext, &activities_model.Action{
+		OpType:    activities_model.ActionCommentIssue,
+		CommentID: comment.ID,
+	})
+	assert.NoError(t, err)
+	err = db.Insert(db.DefaultContext, &activities_model.Action{
+		OpType:  activities_model.ActionCreateIssue,
+		RepoID:  issue.RepoID,
+		Content: fmt.Sprintf("%d|content...", issue.Index),
+	})
+	assert.NoError(t, err)
+
+	// assert that the actions exist, then delete them
+	unittest.AssertCount(t, &activities_model.Action{}, 2)
+	assert.NoError(t, activities_model.DeleteIssueActions(db.DefaultContext, issue.RepoID, issue.ID, issue.Index))
+	unittest.AssertCount(t, &activities_model.Action{}, 0)
+}
@@ -319,7 +319,7 @@ func createIssueNotification(ctx context.Context, userID int64, issue *issues_mo
 }

 func updateIssueNotification(ctx context.Context, userID, issueID, commentID, updatedByID int64) error {
-	notification, err := getIssueNotification(ctx, userID, issueID)
+	notification, err := GetIssueNotification(ctx, userID, issueID)
 	if err != nil {
 		return err
 	}
@@ -340,7 +340,8 @@ func updateIssueNotification(ctx context.Context, userID, issueID, commentID, up
 	return err
 }

-func getIssueNotification(ctx context.Context, userID, issueID int64) (*Notification, error) {
+// GetIssueNotification return the notification about an issue
+func GetIssueNotification(ctx context.Context, userID, issueID int64) (*Notification, error) {
 	notification := new(Notification)
 	_, err := db.GetEngine(ctx).
 		Where("user_id = ?", userID).
@@ -751,7 +752,7 @@ func GetUIDsAndNotificationCounts(since, until timeutil.TimeStamp) ([]UserIDCoun

 // SetIssueReadBy sets issue to be read by given user.
 func SetIssueReadBy(ctx context.Context, issueID, userID int64) error {
-	if err := issues_model.UpdateIssueUserByRead(userID, issueID); err != nil {
+	if err := issues_model.UpdateIssueUserByRead(ctx, userID, issueID); err != nil {
 		return err
 	}

@@ -759,7 +760,7 @@ func SetIssueReadBy(ctx context.Context, issueID, userID int64) error {
 }

 func setIssueNotificationStatusReadIfUnread(ctx context.Context, userID, issueID int64) error {
-	notification, err := getIssueNotification(ctx, userID, issueID)
+	notification, err := GetIssueNotification(ctx, userID, issueID)
 	// ignore if not exists
 	if err != nil {
 		return nil
@@ -771,7 +772,7 @@ func setIssueNotificationStatusReadIfUnread(ctx context.Context, userID, issueID

 	notification.Status = NotificationStatusRead

-	_, err = db.GetEngine(ctx).ID(notification.ID).Update(notification)
+	_, err = db.GetEngine(ctx).ID(notification.ID).Cols("status").Update(notification)
 	return err
 }
@@ -4,6 +4,7 @@
 package activities_test

 import (
+	"context"
 	"testing"

 	activities_model "code.gitea.io/gitea/models/activities"
@@ -109,3 +110,16 @@ func TestUpdateNotificationStatuses(t *testing.T) {
 	unittest.AssertExistsAndLoadBean(t,
 		&activities_model.Notification{ID: notfPinned.ID, Status: activities_model.NotificationStatusPinned})
 }
+
+func TestSetIssueReadBy(t *testing.T) {
+	assert.NoError(t, unittest.PrepareTestDatabase())
+	user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+	issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 1})
+	assert.NoError(t, db.WithTx(db.DefaultContext, func(ctx context.Context) error {
+		return activities_model.SetIssueReadBy(ctx, issue.ID, user.ID)
+	}))
+
+	nt, err := activities_model.GetIssueNotification(db.DefaultContext, user.ID, issue.ID)
+	assert.NoError(t, err)
+	assert.EqualValues(t, activities_model.NotificationStatusRead, nt.Status)
+}
@@ -20,3 +20,19 @@ func BuildCaseInsensitiveLike(key, value string) builder.Cond {
 	}
 	return builder.Like{"UPPER(" + key + ")", strings.ToUpper(value)}
 }
+
+// BuilderDialect returns the xorm.Builder dialect of the engine
+func BuilderDialect() string {
+	switch {
+	case setting.Database.Type.IsMySQL():
+		return builder.MYSQL
+	case setting.Database.Type.IsSQLite3():
+		return builder.SQLITE
+	case setting.Database.Type.IsPostgreSQL():
+		return builder.POSTGRES
+	case setting.Database.Type.IsMSSQL():
+		return builder.MSSQL
+	default:
+		return ""
+	}
+}
@@ -47,6 +47,7 @@ type Engine interface {
 	Incr(column string, arg ...any) *xorm.Session
 	Insert(...any) (int64, error)
 	Iterate(any, xorm.IterFunc) error
+	IsTableExist(any) (bool, error)
 	Join(joinOperator string, tablename, condition any, args ...any) *xorm.Session
 	SQL(any, ...any) *xorm.Session
 	Where(any, ...any) *xorm.Session
@@ -68,6 +68,7 @@ func TestPrimaryKeys(t *testing.T) {
 
 	whitelist := map[string]string{
 		"the_table_name_to_skip_checking": "Write a note here to explain why",
+		"forgejo_sem_ver":                 "seriously dude",
 	}
 
 	for _, bean := range beans {
models/forgejo/semver/main_test.go (new file, 18 lines)
@@ -0,0 +1,18 @@
+// SPDX-License-Identifier: MIT
+
+package semver
+
+import (
+	"path/filepath"
+	"testing"
+
+	"code.gitea.io/gitea/models/unittest"
+
+	_ "code.gitea.io/gitea/models"
+)
+
+func TestMain(m *testing.M) {
+	unittest.MainTest(m, &unittest.TestOptions{
+		GiteaRootPath: filepath.Join("..", "..", ".."),
+	})
+}
models/forgejo/semver/semver.go (new file, 80 lines)
@@ -0,0 +1,80 @@
+// SPDX-License-Identifier: MIT
+
+package semver
+
+import (
+	"context"
+
+	"code.gitea.io/gitea/models/db"
+
+	"github.com/hashicorp/go-version"
+)
+
+func init() {
+	db.RegisterModel(new(ForgejoSemVer))
+}
+
+var DefaultVersionString = "1.0.0"
+
+type ForgejoSemVer struct {
+	Version string
+}
+
+func GetVersion(ctx context.Context) (*version.Version, error) {
+	return GetVersionWithEngine(db.GetEngine(ctx))
+}
+
+func GetVersionWithEngine(e db.Engine) (*version.Version, error) {
+	versionString := DefaultVersionString
+
+	exists, err := e.IsTableExist("forgejo_sem_ver")
+	if err != nil {
+		return nil, err
+	}
+	if exists {
+		var semver ForgejoSemVer
+		has, err := e.Get(&semver)
+		if err != nil {
+			return nil, err
+		} else if has {
+			versionString = semver.Version
+		}
+	}
+
+	v, err := version.NewVersion(versionString)
+	if err != nil {
+		return nil, err
+	}
+	return v, nil
+}
+
+func SetVersionString(ctx context.Context, versionString string) error {
+	return SetVersionStringWithEngine(db.GetEngine(ctx), versionString)
+}
+
+func SetVersionStringWithEngine(e db.Engine, versionString string) error {
+	v, err := version.NewVersion(versionString)
+	if err != nil {
+		return err
+	}
+	return SetVersionWithEngine(e, v)
+}
+
+func SetVersion(ctx context.Context, v *version.Version) error {
+	return SetVersionWithEngine(db.GetEngine(ctx), v)
+}
+
+func SetVersionWithEngine(e db.Engine, v *version.Version) error {
+	var semver ForgejoSemVer
+	has, err := e.Get(&semver)
+	if err != nil {
+		return err
+	}
+
+	if !has {
+		_, err = e.Exec("insert into forgejo_sem_ver values (?)", v.String())
+	} else {
+		_, err = e.Exec("update forgejo_sem_ver set version = ?", v.String())
+	}
+	return err
+}
models/forgejo/semver/semver_test.go (new file, 46 lines)
@@ -0,0 +1,46 @@
+// SPDX-License-Identifier: MIT
+
+package semver
+
+import (
+	"testing"
+
+	"code.gitea.io/gitea/models/db"
+	"code.gitea.io/gitea/models/unittest"
+
+	"github.com/hashicorp/go-version"
+	"github.com/stretchr/testify/assert"
+)
+
+func TestForgejoSemVerSetGet(t *testing.T) {
+	assert.NoError(t, unittest.PrepareTestDatabase())
+	ctx := db.DefaultContext
+
+	newVersion, err := version.NewVersion("v1.2.3")
+	assert.NoError(t, err)
+	assert.NoError(t, SetVersionString(ctx, newVersion.String()))
+	databaseVersion, err := GetVersion(ctx)
+	assert.NoError(t, err)
+	assert.EqualValues(t, newVersion.String(), databaseVersion.String())
+	assert.True(t, newVersion.Equal(databaseVersion))
+}
+
+func TestForgejoSemVerMissing(t *testing.T) {
+	assert.NoError(t, unittest.PrepareTestDatabase())
+	ctx := db.DefaultContext
+	e := db.GetEngine(ctx)
+
+	_, err := e.Exec("delete from forgejo_sem_ver")
+	assert.NoError(t, err)
+
+	v, err := GetVersion(ctx)
+	assert.NoError(t, err)
+	assert.EqualValues(t, "1.0.0", v.String())
+
+	_, err = e.Exec("drop table forgejo_sem_ver")
+	assert.NoError(t, err)
+
+	v, err = GetVersion(ctx)
+	assert.NoError(t, err)
+	assert.EqualValues(t, "1.0.0", v.String())
+}
@@ -8,6 +8,7 @@ import (
 	"fmt"
 	"os"
 
+	"code.gitea.io/gitea/models/forgejo/semver"
 	forgejo_v1_20 "code.gitea.io/gitea/models/forgejo_migrations/v1_20"
 	"code.gitea.io/gitea/modules/git"
 	"code.gitea.io/gitea/modules/log"
@@ -37,6 +38,8 @@ func NewMigration(desc string, fn func(*xorm.Engine) error) *Migration {
 // Add new migrations to the bottom of the list.
 var migrations = []*Migration{
 	NewMigration("Add Forgejo Blocked Users table", forgejo_v1_20.AddForgejoBlockedUser),
+	// v2 -> v3
+	NewMigration("create the forgejo_sem_ver table", forgejo_v1_20.CreateSemVerTable),
 }
 
 // GetCurrentDBVersion returns the current Forgejo database version.
@@ -138,5 +141,10 @@ func Migrate(x *xorm.Engine) error {
 			return err
 		}
 	}
-	return nil
+
+	if err := x.Sync(new(semver.ForgejoSemVer)); err != nil {
+		return fmt.Errorf("sync: %w", err)
+	}
+
+	return semver.SetVersionStringWithEngine(x, setting.ForgejoVersion)
 }
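Note: once the migration above has run, the recorded Forgejo version can be read back through the new semver package. A minimal sketch (the wrapper function is illustrative, not part of this changeset):

	// Sketch: read the version written by semver.SetVersionStringWithEngine above.
	func currentForgejoVersion(ctx context.Context) (string, error) {
		v, err := semver.GetVersion(ctx) // defaults to "1.0.0" when the forgejo_sem_ver table is absent
		if err != nil {
			return "", err
		}
		return v.String(), nil
	}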
models/forgejo_migrations/v1_20/v2.go (new file, 15 lines)
@@ -0,0 +1,15 @@
+// SPDX-License-Identifier: MIT
+
+package forgejo_v1_20 //nolint:revive
+
+import (
+	"xorm.io/xorm"
+)
+
+func CreateSemVerTable(x *xorm.Engine) error {
+	type ForgejoSemVer struct {
+		Version string
+	}
+
+	return x.Sync(new(ForgejoSemVer))
+}
@@ -283,9 +283,9 @@ func GetLatestCommitStatus(ctx context.Context, repoID int64, sha string, listOp
 		Where("repo_id = ?", repoID).And("sha = ?", sha).
 		Select("max( id ) as id").
 		GroupBy("context_hash").OrderBy("max( id ) desc")
 
+	if !listOptions.IsListAll() {
 		sess = db.SetSessionPagination(sess, &listOptions)
+	}
 	count, err := sess.FindAndCount(&ids)
 	if err != nil {
 		return nil, count, err
@@ -155,6 +155,18 @@ func applyMilestoneCondition(sess *xorm.Session, opts *IssuesOptions) *xorm.Sess
 	return sess
 }
 
+func applyProjectCondition(sess *xorm.Session, opts *IssuesOptions) *xorm.Session {
+	if opts.ProjectID > 0 { // specific project
+		sess.Join("INNER", "project_issue", "issue.id = project_issue.issue_id").
+			And("project_issue.project_id=?", opts.ProjectID)
+	} else if opts.ProjectID == db.NoConditionID { // show those that are in no project
+		sess.And(builder.NotIn("issue.id", builder.Select("issue_id").From("project_issue").And(builder.Neq{"project_id": 0})))
+	}
+	// opts.ProjectID == 0 means all projects,
+	// do not need to apply any condition
+	return sess
+}
+
 func applyRepoConditions(sess *xorm.Session, opts *IssuesOptions) *xorm.Session {
 	if len(opts.RepoIDs) == 1 {
 		opts.RepoCond = builder.Eq{"issue.repo_id": opts.RepoIDs[0]}
@@ -213,12 +225,7 @@ func applyConditions(sess *xorm.Session, opts *IssuesOptions) *xorm.Session {
 		sess.And(builder.Lte{"issue.updated_unix": opts.UpdatedBeforeUnix})
 	}
 
-	if opts.ProjectID > 0 {
-		sess.Join("INNER", "project_issue", "issue.id = project_issue.issue_id").
-			And("project_issue.project_id=?", opts.ProjectID)
-	} else if opts.ProjectID == db.NoConditionID { // show those that are in no project
-		sess.And(builder.NotIn("issue.id", builder.Select("issue_id").From("project_issue")))
-	}
+	applyProjectCondition(sess, opts)
 
 	if opts.ProjectBoardID != 0 {
 		if opts.ProjectBoardID > 0 {
@@ -133,10 +133,7 @@ func getIssueStatsChunk(opts *IssuesOptions, issueIDs []int64) (*IssueStats, err
 
 	applyMilestoneCondition(sess, opts)
 
-	if opts.ProjectID > 0 {
-		sess.Join("INNER", "project_issue", "issue.id = project_issue.issue_id").
-			And("project_issue.project_id=?", opts.ProjectID)
-	}
+	applyProjectCondition(sess, opts)
 
 	if opts.AssigneeID > 0 {
 		applyAssigneeCondition(sess, opts.AssigneeID)
@@ -55,8 +55,8 @@ func NewIssueUsers(ctx context.Context, repo *repo_model.Repository, issue *Issu
 }
 
 // UpdateIssueUserByRead updates issue-user relation for reading.
-func UpdateIssueUserByRead(uid, issueID int64) error {
-	_, err := db.GetEngine(db.DefaultContext).Exec("UPDATE `issue_user` SET is_read=? WHERE uid=? AND issue_id=?", true, uid, issueID)
+func UpdateIssueUserByRead(ctx context.Context, uid, issueID int64) error {
+	_, err := db.GetEngine(ctx).Exec("UPDATE `issue_user` SET is_read=? WHERE uid=? AND issue_id=?", true, uid, issueID)
 	return err
 }
 
@@ -40,13 +40,13 @@ func TestUpdateIssueUserByRead(t *testing.T) {
 	assert.NoError(t, unittest.PrepareTestDatabase())
 	issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 1})
 
-	assert.NoError(t, issues_model.UpdateIssueUserByRead(4, issue.ID))
+	assert.NoError(t, issues_model.UpdateIssueUserByRead(db.DefaultContext, 4, issue.ID))
 	unittest.AssertExistsAndLoadBean(t, &issues_model.IssueUser{IssueID: issue.ID, UID: 4}, "is_read=1")
 
-	assert.NoError(t, issues_model.UpdateIssueUserByRead(4, issue.ID))
+	assert.NoError(t, issues_model.UpdateIssueUserByRead(db.DefaultContext, 4, issue.ID))
 	unittest.AssertExistsAndLoadBean(t, &issues_model.IssueUser{IssueID: issue.ID, UID: 4}, "is_read=1")
 
-	assert.NoError(t, issues_model.UpdateIssueUserByRead(unittest.NonexistentID, unittest.NonexistentID))
+	assert.NoError(t, issues_model.UpdateIssueUserByRead(db.DefaultContext, unittest.NonexistentID, unittest.NonexistentID))
 }
 
 func TestUpdateIssueUsersByMentions(t *testing.T) {
@@ -28,6 +28,7 @@ import (
 	"code.gitea.io/gitea/modules/git"
 	"code.gitea.io/gitea/modules/log"
 	"code.gitea.io/gitea/modules/setting"
+	forgejo_services "code.gitea.io/gitea/services/forgejo"
 
 	"xorm.io/xorm"
 	"xorm.io/xorm/names"
@@ -554,6 +555,7 @@ func Migrate(x *xorm.Engine) error {
 		return fmt.Errorf("sync: %w", err)
 	}
 
+	var previousVersion int64
 	currentVersion := &Version{ID: 1}
 	has, err := x.Get(currentVersion)
 	if err != nil {
@@ -567,6 +569,8 @@ func Migrate(x *xorm.Engine) error {
 		if _, err = x.InsertOne(currentVersion); err != nil {
 			return fmt.Errorf("insert: %w", err)
 		}
+	} else {
+		previousVersion = currentVersion.Version
 	}
 
 	v := currentVersion.Version
@@ -595,6 +599,10 @@ Please try upgrading to a lower version first (suggested v1.6.4), then upgrade t
 		}
 	}
 
+	if err := forgejo_services.PreMigrationSanityChecks(x, previousVersion, setting.CfgProvider); err != nil {
+		return err
+	}
+
 	// Migrate
 	for i, m := range migrations[v-minDBVersion:] {
 		log.Info("Migration[%d]: %s", v+int64(i), m.Description())
models/packages/nuget/search.go (new file, 70 lines)
@@ -0,0 +1,70 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package nuget
+
+import (
+	"context"
+	"strings"
+
+	"code.gitea.io/gitea/models/db"
+	packages_model "code.gitea.io/gitea/models/packages"
+
+	"xorm.io/builder"
+)
+
+// SearchVersions gets all versions of packages matching the search options
+func SearchVersions(ctx context.Context, opts *packages_model.PackageSearchOptions) ([]*packages_model.PackageVersion, int64, error) {
+	cond := toConds(opts)
+
+	e := db.GetEngine(ctx)
+
+	total, err := e.
+		Where(cond).
+		Count(&packages_model.Package{})
+	if err != nil {
+		return nil, 0, err
+	}
+
+	inner := builder.
+		Dialect(db.BuilderDialect()). // builder needs the sql dialect to build the Limit() below
+		Select("*").
+		From("package").
+		Where(cond).
+		OrderBy("package.name ASC")
+	if opts.Paginator != nil {
+		skip, take := opts.GetSkipTake()
+		inner = inner.Limit(take, skip)
+	}
+
+	sess := e.
+		Where(opts.ToConds()).
+		Table("package_version").
+		Join("INNER", inner, "package.id = package_version.package_id")
+
+	pvs := make([]*packages_model.PackageVersion, 0, 10)
+	return pvs, total, sess.Find(&pvs)
+}
+
+// CountPackages counts all packages matching the search options
+func CountPackages(ctx context.Context, opts *packages_model.PackageSearchOptions) (int64, error) {
+	return db.GetEngine(ctx).
+		Where(toConds(opts)).
+		Count(&packages_model.Package{})
+}
+
+func toConds(opts *packages_model.PackageSearchOptions) builder.Cond {
+	var cond builder.Cond = builder.Eq{
+		"package.is_internal": opts.IsInternal.IsTrue(),
+		"package.owner_id":    opts.OwnerID,
+		"package.type":        packages_model.TypeNuGet,
+	}
+	if opts.Name.Value != "" {
+		if opts.Name.ExactMatch {
+			cond = cond.And(builder.Eq{"package.lower_name": strings.ToLower(opts.Name.Value)})
+		} else {
+			cond = cond.And(builder.Like{"package.lower_name", strings.ToLower(opts.Name.Value)})
+		}
+	}
+	return cond
+}
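Note: a rough sketch of how the NuGet-specific search might be called. The option fields follow what toConds above reads, but the exact helper names (SearchValue, NewAbsoluteListOptions) are assumptions from the surrounding models package, not taken from this changeset:

	// Sketch: paginated NuGet version search (values are illustrative).
	opts := &packages_model.PackageSearchOptions{
		OwnerID:   owner.ID,
		Name:      packages_model.SearchValue{Value: "json"}, // substring match via toConds
		Paginator: db.NewAbsoluteListOptions(0, 20),          // skip, take
	}
	pvs, total, err := nuget.SearchVersions(ctx, opts)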
@@ -189,7 +189,7 @@ type PackageSearchOptions struct {
 	db.Paginator
 }
 
-func (opts *PackageSearchOptions) toConds() builder.Cond {
+func (opts *PackageSearchOptions) ToConds() builder.Cond {
 	cond := builder.NewCond()
 	if !opts.IsInternal.IsNone() {
 		cond = builder.Eq{
@@ -283,7 +283,7 @@ func (opts *PackageSearchOptions) configureOrderBy(e db.Engine) {
 // SearchVersions gets all versions of packages matching the search options
 func SearchVersions(ctx context.Context, opts *PackageSearchOptions) ([]*PackageVersion, int64, error) {
 	sess := db.GetEngine(ctx).
-		Where(opts.toConds()).
+		Where(opts.ToConds()).
 		Table("package_version").
 		Join("INNER", "package", "package.id = package_version.package_id")
 
@@ -300,7 +300,7 @@ func SearchVersions(ctx context.Context, opts *PackageSearchOptions) ([]*Package
 
 // SearchLatestVersions gets the latest version of every package matching the search options
 func SearchLatestVersions(ctx context.Context, opts *PackageSearchOptions) ([]*PackageVersion, int64, error) {
-	cond := opts.toConds().
+	cond := opts.ToConds().
 		And(builder.Expr("pv2.id IS NULL"))
 
 	joinCond := builder.Expr("package_version.package_id = pv2.package_id AND (package_version.created_unix < pv2.created_unix OR (package_version.created_unix = pv2.created_unix AND package_version.id < pv2.id))")
@@ -328,7 +328,7 @@ func SearchLatestVersions(ctx context.Context, opts *PackageSearchOptions) ([]*P
 // ExistVersion checks if a version matching the search options exist
 func ExistVersion(ctx context.Context, opts *PackageSearchOptions) (bool, error) {
 	return db.GetEngine(ctx).
-		Where(opts.toConds()).
+		Where(opts.ToConds()).
 		Table("package_version").
 		Join("INNER", "package", "package.id = package_version.package_id").
 		Exist(new(PackageVersion))
@@ -337,7 +337,7 @@ func ExistVersion(ctx context.Context, opts *PackageSearchOptions) (bool, error)
 // CountVersions counts all versions of packages matching the search options
 func CountVersions(ctx context.Context, opts *PackageSearchOptions) (int64, error) {
 	return db.GetEngine(ctx).
-		Where(opts.toConds()).
+		Where(opts.ToConds()).
 		Table("package_version").
 		Join("INNER", "package", "package.id = package_version.package_id").
 		Count(new(PackageVersion))
@@ -22,7 +22,7 @@ func init() {
 	db.RegisterModel(new(RepoTopic))
 }
 
-var topicPattern = regexp.MustCompile(`^[a-z0-9][a-z0-9-]*$`)
+var topicPattern = regexp.MustCompile(`^[a-z0-9][-.a-z0-9]*$`)
 
 // Topic represents a topic of repositories
 type Topic struct {
@@ -69,6 +69,7 @@ func TestAddTopic(t *testing.T) {
 func TestTopicValidator(t *testing.T) {
 	assert.True(t, repo_model.ValidateTopic("12345"))
 	assert.True(t, repo_model.ValidateTopic("2-test"))
+	assert.True(t, repo_model.ValidateTopic("foo.bar"))
 	assert.True(t, repo_model.ValidateTopic("test-3"))
 	assert.True(t, repo_model.ValidateTopic("first"))
 	assert.True(t, repo_model.ValidateTopic("second-test-topic"))
@@ -77,4 +78,5 @@ func TestTopicValidator(t *testing.T) {
 	assert.False(t, repo_model.ValidateTopic("$fourth-test,topic"))
 	assert.False(t, repo_model.ValidateTopic("-fifth-test-topic"))
 	assert.False(t, repo_model.ValidateTopic("sixth-go-project-topic-with-excess-length"))
+	assert.False(t, repo_model.ValidateTopic(".foo"))
 }
@@ -203,11 +203,16 @@ func UpdateUserTheme(u *User, themeName string) error {
 	return UpdateUserCols(db.DefaultContext, u, "theme")
 }
 
+// GetPlaceholderEmail returns an noreply email
+func (u *User) GetPlaceholderEmail() string {
+	return fmt.Sprintf("%s@%s", u.LowerName, setting.Service.NoReplyAddress)
+}
+
 // GetEmail returns an noreply email, if the user has set to keep his
 // email address private, otherwise the primary email address.
 func (u *User) GetEmail() string {
 	if u.KeepEmailPrivate {
-		return fmt.Sprintf("%s@%s", u.LowerName, setting.Service.NoReplyAddress)
+		return u.GetPlaceholderEmail()
 	}
 	return u.Email
 }
@@ -143,6 +143,10 @@ func (b *Base) Params(p string) string {
 	return s
 }
 
+func (b *Base) PathParamRaw(p string) string {
+	return chi.URLParam(b.Req, strings.TrimPrefix(p, ":"))
+}
+
 // ParamsInt64 returns the param on route as int64
 func (b *Base) ParamsInt64(p string) int64 {
 	v, _ := strconv.ParseInt(b.Params(p), 10, 64)
@@ -74,6 +74,8 @@ func CatFileBatchCheck(ctx context.Context, repoPath string) (WriteCloserError,
 		Stdin:  batchStdinReader,
 		Stdout: batchStdoutWriter,
 		Stderr: &stderr,
+
+		UseContextTimeout: true,
 	})
 	if err != nil {
 		_ = batchStdoutWriter.CloseWithError(ConcatenateError(err, (&stderr).String()))
@@ -124,6 +126,8 @@ func CatFileBatch(ctx context.Context, repoPath string) (WriteCloserError, *bufi
 		Stdin:  batchStdinReader,
 		Stdout: batchStdoutWriter,
 		Stderr: &stderr,
+
+		UseContextTimeout: true,
 	})
 	if err != nil {
 		_ = batchStdoutWriter.CloseWithError(ConcatenateError(err, (&stderr).String()))
@@ -5,11 +5,14 @@ package git
 
 import (
 	"bufio"
+	"bytes"
 	"context"
 	"fmt"
 	"io"
 	"os"
 	"regexp"
+
+	"code.gitea.io/gitea/modules/log"
 )
 
 // BlamePart represents block of blame - continuous lines with one sha
@@ -115,15 +118,19 @@ func CreateBlameReader(ctx context.Context, repoPath, commitID, file string) (*B
 	done := make(chan error, 1)
 
 	go func(cmd *Command, dir string, stdout io.WriteCloser, done chan error) {
-		if err := cmd.Run(&RunOpts{
+		stderr := bytes.Buffer{}
+		// TODO: it doesn't work for directories (the directories shouldn't be "blamed"), and the "err" should be returned by "Read" but not by "Close"
+		err := cmd.Run(&RunOpts{
 			UseContextTimeout: true,
 			Dir:               dir,
 			Stdout:            stdout,
-			Stderr:            os.Stderr,
-		}); err == nil {
-			stdout.Close()
-		}
+			Stderr:            &stderr,
+		})
 		done <- err
+		_ = stdout.Close()
+		if err != nil {
+			log.Error("Error running git blame (dir: %v): %v, stderr: %v", repoPath, err, stderr.String())
+		}
 	}(cmd, repoPath, stdout, done)
 
 	bufferedReader := bufio.NewReader(reader)
@@ -80,7 +80,7 @@ func InitRepository(ctx context.Context, repoPath string, bare bool) error {
 // IsEmpty Check if repository is empty.
 func (repo *Repository) IsEmpty() (bool, error) {
 	var errbuf, output strings.Builder
-	if err := NewCommand(repo.Ctx).AddOptionFormat("--git-dir=%s", repo.Path).AddArguments("show-ref", "--head", "^HEAD$").
+	if err := NewCommand(repo.Ctx).AddOptionFormat("--git-dir=%s", repo.Path).AddArguments("rev-list", "-n", "1", "--all").
 		Run(&RunOpts{
 			Dir:    repo.Path,
 			Stdout: &output,
@@ -280,8 +280,16 @@ func (repo *Repository) GetPatch(base, head string, w io.Writer) error {
 }
 
 // GetFilesChangedBetween returns a list of all files that have been changed between the given commits
+// If base is undefined empty SHA (zeros), it only returns the files changed in the head commit
+// If base is the SHA of an empty tree (EmptyTreeSHA), it returns the files changes from the initial commit to the head commit
 func (repo *Repository) GetFilesChangedBetween(base, head string) ([]string, error) {
-	stdout, _, err := NewCommand(repo.Ctx, "diff", "--name-only", "-z").AddDynamicArguments(base + ".." + head).RunStdString(&RunOpts{Dir: repo.Path})
+	cmd := NewCommand(repo.Ctx, "diff-tree", "--name-only", "--root", "--no-commit-id", "-r", "-z")
+	if base == EmptySHA {
+		cmd.AddDynamicArguments(head)
+	} else {
+		cmd.AddDynamicArguments(base, head)
+	}
+	stdout, _, err := cmd.RunStdString(&RunOpts{Dir: repo.Path})
 	if err != nil {
 		return nil, err
 	}
@@ -119,3 +119,42 @@ func TestReadWritePullHead(t *testing.T) {
 	err = repo.RemoveReference(PullPrefix + "1/head")
 	assert.NoError(t, err)
 }
+
+func TestGetCommitFilesChanged(t *testing.T) {
+	bareRepo1Path := filepath.Join(testReposDir, "repo1_bare")
+	repo, err := openRepositoryWithDefaultContext(bareRepo1Path)
+	assert.NoError(t, err)
+	defer repo.Close()
+
+	testCases := []struct {
+		base, head string
+		files      []string
+	}{
+		{
+			EmptySHA,
+			"95bb4d39648ee7e325106df01a621c530863a653",
+			[]string{"file1.txt"},
+		},
+		{
+			EmptySHA,
+			"8d92fc957a4d7cfd98bc375f0b7bb189a0d6c9f2",
+			[]string{"file2.txt"},
+		},
+		{
+			"95bb4d39648ee7e325106df01a621c530863a653",
+			"8d92fc957a4d7cfd98bc375f0b7bb189a0d6c9f2",
+			[]string{"file2.txt"},
+		},
+		{
+			EmptyTreeSHA,
+			"8d92fc957a4d7cfd98bc375f0b7bb189a0d6c9f2",
+			[]string{"file1.txt", "file2.txt"},
+		},
+	}
+
+	for _, tc := range testCases {
+		changedFiles, err := repo.GetFilesChangedBetween(tc.base, tc.head)
+		assert.NoError(t, err)
+		assert.ElementsMatch(t, tc.files, changedFiles)
+	}
+}
@@ -11,10 +11,10 @@ import (
 	"strings"
 )
 
-// EmptySHA defines empty git SHA
+// EmptySHA defines empty git SHA (undefined, non-existent)
 const EmptySHA = "0000000000000000000000000000000000000000"
 
-// EmptyTreeSHA is the SHA of an empty tree
+// EmptyTreeSHA is the SHA of an empty tree, the root of all git repositories
 const EmptyTreeSHA = "4b825dc642cb6eb9a060e54bf8d69288fbee4904"
 
 // SHAFullLength is the full length of a git SHA
@@ -153,18 +153,30 @@ func (r *Writer) WriteRegularLink(l org.RegularLink) {
 		link = []byte(util.URLJoin(r.URLPrefix, lnk))
 	}
 
+	switch l.Kind() {
+	case "image":
+		if l.Description == nil {
+			imageSrc := getMediaURL(link)
+			fmt.Fprintf(r, `<img src="%s" alt="%s" title="%s" />`, imageSrc, link, link)
+		} else {
+			description := strings.TrimPrefix(org.String(l.Description), "file:")
+			imageSrc := getMediaURL([]byte(description))
+			fmt.Fprintf(r, `<a href="%s"><img src="%s" alt="%s" /></a>`, link, imageSrc, imageSrc)
+		}
+	case "video":
+		if l.Description == nil {
+			imageSrc := getMediaURL(link)
+			fmt.Fprintf(r, `<video src="%s" title="%s">%s</video>`, imageSrc, link, link)
+		} else {
+			description := strings.TrimPrefix(org.String(l.Description), "file:")
+			videoSrc := getMediaURL([]byte(description))
+			fmt.Fprintf(r, `<a href="%s"><video src="%s" title="%s"></video></a>`, link, videoSrc, videoSrc)
+		}
+	default:
 		description := string(link)
 		if l.Description != nil {
 			description = r.WriteNodesAsString(l.Description...)
 		}
-	switch l.Kind() {
-	case "image":
-		imageSrc := getMediaURL(link)
-		fmt.Fprintf(r, `<img src="%s" alt="%s" title="%s" />`, imageSrc, description, description)
-	case "video":
-		videoSrc := getMediaURL(link)
-		fmt.Fprintf(r, `<video src="%s" title="%s">%s</video>`, videoSrc, description, description)
-	default:
 		fmt.Fprintf(r, `<a href="%s" title="%s">%s</a>`, link, description, description)
 	}
 }
@@ -42,7 +42,7 @@ func TestRender_StandardLinks(t *testing.T) {
 		"<p><a href=\""+lnk+"\" title=\"WikiPage\">WikiPage</a></p>")
 }
 
-func TestRender_Images(t *testing.T) {
+func TestRender_Media(t *testing.T) {
 	setting.AppURL = AppURL
 	setting.AppSubURL = AppSubURL
 
@@ -60,6 +60,18 @@ func TestRender_Images(t *testing.T) {
 
 	test("[[file:"+url+"]]",
 		"<p><img src=\""+result+"\" alt=\""+result+"\" title=\""+result+"\" /></p>")
+
+	// With description.
+	test("[[https://example.com][https://example.com/example.svg]]",
+		`<p><a href="https://example.com"><img src="https://example.com/example.svg" alt="https://example.com/example.svg" /></a></p>`)
+	test("[[https://example.com][https://example.com/example.mp4]]",
+		`<p><a href="https://example.com"><video src="https://example.com/example.mp4" title="https://example.com/example.mp4"></video></a></p>`)
+
+	// Without description.
+	test("[[https://example.com/example.svg]]",
+		`<p><img src="https://example.com/example.svg" alt="https://example.com/example.svg" title="https://example.com/example.svg" /></p>`)
+	test("[[https://example.com/example.mp4]]",
+		`<p><video src="https://example.com/example.mp4" title="https://example.com/example.mp4">https://example.com/example.mp4</video></p>`)
 }
 
 func TestRender_Source(t *testing.T) {
@@ -172,19 +172,10 @@ func ParseControlFile(r io.Reader) (*Package, error) {
 		value := strings.TrimSpace(parts[1])
 		switch key {
 		case "Package":
-			if !namePattern.MatchString(value) {
-				return nil, ErrInvalidName
-			}
 			p.Name = value
 		case "Version":
-			if !versionPattern.MatchString(value) {
-				return nil, ErrInvalidVersion
-			}
 			p.Version = value
 		case "Architecture":
-			if value == "" {
-				return nil, ErrInvalidArchitecture
-			}
 			p.Architecture = value
 		case "Maintainer":
 			a, err := mail.ParseAddress(value)
@@ -208,13 +199,23 @@ func ParseControlFile(r io.Reader) (*Package, error) {
 		return nil, err
 	}
 
+	if !namePattern.MatchString(p.Name) {
+		return nil, ErrInvalidName
+	}
+	if !versionPattern.MatchString(p.Version) {
+		return nil, ErrInvalidVersion
+	}
+	if p.Architecture == "" {
+		return nil, ErrInvalidArchitecture
+	}
+
 	dependencies := strings.Split(depends.String(), ",")
 	for i := range dependencies {
 		dependencies[i] = strings.TrimSpace(dependencies[i])
 	}
 	p.Metadata.Dependencies = dependencies
 
-	p.Control = control.String()
+	p.Control = strings.TrimSpace(control.String())
 
 	return p, nil
 }
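Note: because the Name/Version/Architecture checks now run after the whole control file has been read, the fields may appear in any order and are validated on the final parsed values. A minimal, illustrative parse; the control payload here is an example, not a fixture from this changeset:

	// Sketch: parsing a minimal Debian control file with the relocated validation.
	p, err := ParseControlFile(strings.NewReader(
		"Package: forgejo\nVersion: 1.0.0-1\nArchitecture: amd64\nMaintainer: Jane Doe <jane@example.com>\nDescription: example\n"))
	if err != nil {
		return err
	}
	fmt.Println(p.Name, p.Version, p.Architecture)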
@@ -4,6 +4,7 @@
 package setting
 
 import (
+	"bytes"
 	"os"
 	"regexp"
 	"strconv"
@@ -133,6 +134,11 @@ func EnvironmentToConfig(cfg ConfigProvider, envs []string) (changed bool) {
 				log.Error("Error reading file for %s : %v", envKey, envValue, err)
 				continue
 			}
+			if bytes.HasSuffix(fileContent, []byte("\r\n")) {
+				fileContent = fileContent[:len(fileContent)-2]
+			} else if bytes.HasSuffix(fileContent, []byte("\n")) {
+				fileContent = fileContent[:len(fileContent)-1]
+			}
 			keyValue = string(fileContent)
 		}
 
@@ -106,4 +106,19 @@ key = old
 	changed = EnvironmentToConfig(cfg, []string{"GITEA__sec__key__FILE=" + tmpFile})
 	assert.True(t, changed)
 	assert.Equal(t, "value-from-file", cfg.Section("sec").Key("key").String())
+
+	cfg, _ = NewConfigProviderFromData("")
+	_ = os.WriteFile(tmpFile, []byte("value-from-file\n"), 0o644)
+	EnvironmentToConfig(cfg, []string{"GITEA__sec__key__FILE=" + tmpFile})
+	assert.Equal(t, "value-from-file", cfg.Section("sec").Key("key").String())
+
+	cfg, _ = NewConfigProviderFromData("")
+	_ = os.WriteFile(tmpFile, []byte("value-from-file\r\n"), 0o644)
+	EnvironmentToConfig(cfg, []string{"GITEA__sec__key__FILE=" + tmpFile})
+	assert.Equal(t, "value-from-file", cfg.Section("sec").Key("key").String())
+
+	cfg, _ = NewConfigProviderFromData("")
+	_ = os.WriteFile(tmpFile, []byte("value-from-file\n\n"), 0o644)
+	EnvironmentToConfig(cfg, []string{"GITEA__sec__key__FILE=" + tmpFile})
+	assert.Equal(t, "value-from-file\n", cfg.Section("sec").Key("key").String())
 }
@@ -53,6 +53,7 @@ type ConfigProvider interface {
 	Save() error
 	SaveTo(filename string) error
 
+	GetFile() string
 	DisableSaving()
 	PrepareSaving() (ConfigProvider, error)
 	IsLoadedFromEmpty() bool
@@ -251,6 +252,10 @@ func (p *iniConfigProvider) GetSection(name string) (ConfigSection, error) {
 
 var errDisableSaving = errors.New("this config can't be saved, developers should prepare a new config to save")
 
+func (p *iniConfigProvider) GetFile() string {
+	return p.file
+}
+
 // Save saves the content into file
 func (p *iniConfigProvider) Save() error {
 	if p.disableSaving {
@@ -63,7 +63,7 @@ func loadLFSFrom(rootCfg ConfigProvider) error {
 	LFS.JWTSecretBytes = make([]byte, 32)
 	n, err := base64.RawURLEncoding.Decode(LFS.JWTSecretBytes, []byte(LFS.JWTSecretBase64))
 
-	if err != nil || n != 32 {
+	if (err != nil || n != 32) && InstallLock {
 		LFS.JWTSecretBase64, err = generate.NewJwtSecretBase64()
 		if err != nil {
 			return fmt.Errorf("error generating JWT Secret for custom config: %v", err)
@@ -165,7 +165,7 @@ func loadLogModeByName(rootCfg ConfigProvider, loggerName, modeName string) (wri
 		writerMode.WriterOption = writerOption
 	default:
 		if !log.HasEventWriter(writerType) {
-			return "", "", writerMode, fmt.Errorf("invalid log writer type (mode): %s", writerType)
+			return "", "", writerMode, fmt.Errorf("invalid log writer type (mode): %s, maybe it needs something like 'MODE=file' in [log.%s] section", writerType, modeName)
 		}
 	}
 
@@ -15,6 +15,8 @@ import (
 	"code.gitea.io/gitea/modules/user"
 )
 
+var ForgejoVersion = "1.0.0"
+
 // settings
 var (
 	// AppVer is the version of the current build of Gitea. It is set in main.go from main.Version.
@@ -84,102 +84,179 @@ func getDefaultStorageSection(rootCfg ConfigProvider) ConfigSection {
 	return storageSec
 }
 
+// getStorage will find target section and extra special section first and then read override
+// items from extra section
 func getStorage(rootCfg ConfigProvider, name, typ string, sec ConfigSection) (*Storage, error) {
 	if name == "" {
 		return nil, errors.New("no name for storage")
 	}
 
-	var targetSec ConfigSection
-	if typ != "" {
-		var err error
-		targetSec, err = rootCfg.GetSection(storageSectionName + "." + typ)
+	targetSec, tp, err := getStorageTargetSection(rootCfg, name, typ, sec)
+	if err != nil {
+		return nil, err
+	}
+
+	overrideSec := getStorageOverrideSection(rootCfg, targetSec, sec, tp, name)
+
+	targetType := targetSec.Key("STORAGE_TYPE").String()
+	switch targetType {
+	case string(LocalStorageType):
+		return getStorageForLocal(targetSec, overrideSec, tp, name)
+	case string(MinioStorageType):
+		return getStorageForMinio(targetSec, overrideSec, tp, name)
+	default:
+		return nil, fmt.Errorf("unsupported storage type %q", targetType)
+	}
+}
+
+type targetSecType int
+
+const (
+	targetSecIsTyp targetSecType = iota // target section is [storage.type] which the type from parameter
+	targetSecIsStorage                  // target section is [storage]
+	targetSecIsDefault                  // target section is the default value
+	targetSecIsStorageWithName          // target section is [storage.name]
+	targetSecIsSec                      // target section is from the name seciont [name]
+)
+
+func getStorageSectionByType(rootCfg ConfigProvider, typ string) (ConfigSection, targetSecType, error) {
+	targetSec, err := rootCfg.GetSection(storageSectionName + "." + typ)
 	if err != nil {
 		if !IsValidStorageType(StorageType(typ)) {
-			return nil, fmt.Errorf("get section via storage type %q failed: %v", typ, err)
+			return nil, 0, fmt.Errorf("get section via storage type %q failed: %v", typ, err)
 		}
+		// if typ is a valid storage type, but there is no [storage.local] or [storage.minio] section
+		// it's not an error
+		return nil, 0, nil
 	}
-	if targetSec != nil {
+
 	targetType := targetSec.Key("STORAGE_TYPE").String()
 	if targetType == "" {
 		if !IsValidStorageType(StorageType(typ)) {
-			return nil, fmt.Errorf("unknow storage type %q", typ)
+			return nil, 0, fmt.Errorf("unknow storage type %q", typ)
 		}
 		targetSec.Key("STORAGE_TYPE").SetValue(typ)
 	} else if !IsValidStorageType(StorageType(targetType)) {
-		return nil, fmt.Errorf("unknow storage type %q for section storage.%v", targetType, typ)
+		return nil, 0, fmt.Errorf("unknow storage type %q for section storage.%v", targetType, typ)
+	}
+
+	return targetSec, targetSecIsTyp, nil
+}
+
+func getStorageTargetSection(rootCfg ConfigProvider, name, typ string, sec ConfigSection) (ConfigSection, targetSecType, error) {
+	// check typ first
+	if typ == "" {
+		if sec != nil { // check sec's type secondly
+			typ = sec.Key("STORAGE_TYPE").String()
+			if IsValidStorageType(StorageType(typ)) {
+				if targetSec, _ := rootCfg.GetSection(storageSectionName + "." + typ); targetSec == nil {
+					return sec, targetSecIsSec, nil
+				}
 			}
 		}
 	}
 
-	storageNameSec, _ := rootCfg.GetSection(storageSectionName + "." + name)
+	if typ != "" {
+		targetSec, tp, err := getStorageSectionByType(rootCfg, typ)
+		if targetSec != nil || err != nil {
+			return targetSec, tp, err
+		}
+	}
 
-	if targetSec == nil {
-		targetSec = sec
-	}
-	if targetSec == nil {
-		targetSec = storageNameSec
-	}
-	if targetSec == nil {
-		targetSec = getDefaultStorageSection(rootCfg)
-	} else {
+	// check stoarge name thirdly
+	targetSec, _ := rootCfg.GetSection(storageSectionName + "." + name)
+	if targetSec != nil {
 		targetType := targetSec.Key("STORAGE_TYPE").String()
 		switch {
 		case targetType == "":
-			if targetSec.Key("PATH").String() == "" {
-				targetSec = getDefaultStorageSection(rootCfg)
-			} else {
+			if targetSec.Key("PATH").String() == "" { // both storage type and path are empty, use default
+				return getDefaultStorageSection(rootCfg), targetSecIsDefault, nil
+			}
+
 			targetSec.Key("STORAGE_TYPE").SetValue("local")
-			}
 		default:
-			newTargetSec, _ := rootCfg.GetSection(storageSectionName + "." + targetType)
-			if newTargetSec == nil {
-				if !IsValidStorageType(StorageType(targetType)) {
-					return nil, fmt.Errorf("invalid storage section %s.%q", storageSectionName, targetType)
+			targetSec, tp, err := getStorageSectionByType(rootCfg, targetType)
+			if targetSec != nil || err != nil {
+				return targetSec, tp, err
 			}
 		}
+
+		return targetSec, targetSecIsStorageWithName, nil
+	}
+
+	return getDefaultStorageSection(rootCfg), targetSecIsDefault, nil
+}
+
+// getStorageOverrideSection override section will be read SERVE_DIRECT, PATH, MINIO_BASE_PATH, MINIO_BUCKET to override the targetsec when possible
+func getStorageOverrideSection(rootConfig ConfigProvider, targetSec, sec ConfigSection, targetSecType targetSecType, name string) ConfigSection {
+	if targetSecType == targetSecIsSec {
+		return nil
+	}
+
+	if sec != nil {
+		return sec
+	}
+
+	if targetSecType != targetSecIsStorageWithName {
+		nameSec, _ := rootConfig.GetSection(storageSectionName + "." + name)
+		return nameSec
+	}
+	return nil
+}
+
+func getStorageForLocal(targetSec, overrideSec ConfigSection, tp targetSecType, name string) (*Storage, error) {
+	storage := Storage{
+		Type: StorageType(targetSec.Key("STORAGE_TYPE").String()),
+	}
+
+	targetPath := ConfigSectionKeyString(targetSec, "PATH", "")
+	var fallbackPath string
+	if targetPath == "" { // no path
+		fallbackPath = filepath.Join(AppDataPath, name)
 	} else {
-		targetSec = newTargetSec
-		if IsValidStorageType(StorageType(targetType)) {
-			tp := targetSec.Key("STORAGE_TYPE").String()
-			if tp == "" {
-				targetSec.Key("STORAGE_TYPE").SetValue(targetType)
-			}
-		}
+		if tp == targetSecIsStorage || tp == targetSecIsDefault {
+			fallbackPath = filepath.Join(targetPath, name)
+		} else {
+			fallbackPath = targetPath
 		}
+		if !filepath.IsAbs(fallbackPath) {
+			fallbackPath = filepath.Join(AppDataPath, fallbackPath)
 		}
 	}
 
-	targetType := targetSec.Key("STORAGE_TYPE").String()
-	if !IsValidStorageType(StorageType(targetType)) {
-		return nil, fmt.Errorf("invalid storage type %q", targetType)
+	if overrideSec == nil { // no override section
+		storage.Path = fallbackPath
+	} else {
+		storage.Path = ConfigSectionKeyString(overrideSec, "PATH", "")
+		if storage.Path == "" { // overrideSec has no path
+			storage.Path = fallbackPath
+		} else if !filepath.IsAbs(storage.Path) {
+			if targetPath == "" {
+				storage.Path = filepath.Join(AppDataPath, storage.Path)
+			} else {
+				storage.Path = filepath.Join(targetPath, storage.Path)
+			}
 		}
-
-	var storage Storage
-	storage.Type = StorageType(targetType)
-
-	switch targetType {
-	case string(LocalStorageType):
-		storage.Path = ConfigSectionKeyString(targetSec, "PATH", filepath.Join(AppDataPath, name))
-		if !filepath.IsAbs(storage.Path) {
-			storage.Path = filepath.Join(AppWorkPath, storage.Path)
-		}
-	case string(MinioStorageType):
-		storage.MinioConfig.BasePath = name + "/"
-
-		if err := targetSec.MapTo(&storage.MinioConfig); err != nil {
-			return nil, fmt.Errorf("map minio config failed: %v", err)
-		}
-		// extra config section will be read SERVE_DIRECT, PATH, MINIO_BASE_PATH to override the targetsec
-		extraConfigSec := sec
-		if extraConfigSec == nil {
-			extraConfigSec = storageNameSec
-		}
-
-		if extraConfigSec != nil {
-			storage.MinioConfig.ServeDirect = ConfigSectionKeyBool(extraConfigSec, "SERVE_DIRECT", storage.MinioConfig.ServeDirect)
-			storage.MinioConfig.BasePath = ConfigSectionKeyString(extraConfigSec, "MINIO_BASE_PATH", storage.MinioConfig.BasePath)
-			storage.MinioConfig.Bucket = ConfigSectionKeyString(extraConfigSec, "MINIO_BUCKET", storage.MinioConfig.Bucket)
-		}
 	}
 
 	return &storage, nil
 }
+
+func getStorageForMinio(targetSec, overrideSec ConfigSection, tp targetSecType, name string) (*Storage, error) {
+	var storage Storage
+	storage.Type = StorageType(targetSec.Key("STORAGE_TYPE").String())
+	if err := targetSec.MapTo(&storage.MinioConfig); err != nil {
+		return nil, fmt.Errorf("map minio config failed: %v", err)
+	}
+
+	if storage.MinioConfig.BasePath == "" {
+		storage.MinioConfig.BasePath = name + "/"
+	}
+
+	if overrideSec != nil {
+		storage.MinioConfig.ServeDirect = ConfigSectionKeyBool(overrideSec, "SERVE_DIRECT", storage.MinioConfig.ServeDirect)
+		storage.MinioConfig.BasePath = ConfigSectionKeyString(overrideSec, "MINIO_BASE_PATH", storage.MinioConfig.BasePath)
+		storage.MinioConfig.Bucket = ConfigSectionKeyString(overrideSec, "MINIO_BUCKET", storage.MinioConfig.Bucket)
+	}
+	return &storage, nil
+}
@@ -4,6 +4,7 @@
package setting

import (
+	"path/filepath"
	"testing"

	"github.com/stretchr/testify/assert"

@@ -26,12 +27,15 @@ MINIO_BUCKET = gitea-storage

	assert.NoError(t, loadAttachmentFrom(cfg))
	assert.EqualValues(t, "gitea-attachment", Attachment.Storage.MinioConfig.Bucket)
+	assert.EqualValues(t, "attachments/", Attachment.Storage.MinioConfig.BasePath)

	assert.NoError(t, loadLFSFrom(cfg))
	assert.EqualValues(t, "gitea-lfs", LFS.Storage.MinioConfig.Bucket)
+	assert.EqualValues(t, "lfs/", LFS.Storage.MinioConfig.BasePath)

	assert.NoError(t, loadAvatarsFrom(cfg))
	assert.EqualValues(t, "gitea-storage", Avatar.Storage.MinioConfig.Bucket)
+	assert.EqualValues(t, "avatars/", Avatar.Storage.MinioConfig.BasePath)
}

func Test_getStorageUseOtherNameAsType(t *testing.T) {

@@ -48,9 +52,11 @@ MINIO_BUCKET = gitea-storage

	assert.NoError(t, loadAttachmentFrom(cfg))
	assert.EqualValues(t, "gitea-storage", Attachment.Storage.MinioConfig.Bucket)
+	assert.EqualValues(t, "attachments/", Attachment.Storage.MinioConfig.BasePath)

	assert.NoError(t, loadLFSFrom(cfg))
	assert.EqualValues(t, "gitea-storage", LFS.Storage.MinioConfig.Bucket)
+	assert.EqualValues(t, "lfs/", LFS.Storage.MinioConfig.BasePath)
}

func Test_getStorageInheritStorageType(t *testing.T) {
@@ -90,3 +96,319 @@ STORAGE_TYPE = minio
	assert.EqualValues(t, "gitea", RepoAvatar.Storage.MinioConfig.Bucket)
	assert.EqualValues(t, "repo-avatars/", RepoAvatar.Storage.MinioConfig.BasePath)
}
+
+type testLocalStoragePathCase struct {
+	loader       func(rootCfg ConfigProvider) error
+	storagePtr   **Storage
+	expectedPath string
+}
+
+func testLocalStoragePath(t *testing.T, appDataPath, iniStr string, cases []testLocalStoragePathCase) {
+	cfg, err := NewConfigProviderFromData(iniStr)
+	assert.NoError(t, err)
+	AppDataPath = appDataPath
+	for _, c := range cases {
+		assert.NoError(t, c.loader(cfg))
+		storage := *c.storagePtr
+
+		assert.EqualValues(t, "local", storage.Type)
+		assert.True(t, filepath.IsAbs(storage.Path))
+		assert.EqualValues(t, filepath.Clean(c.expectedPath), filepath.Clean(storage.Path))
+	}
+}
+
+func Test_getStorageInheritStorageTypeLocal(t *testing.T) {
+	testLocalStoragePath(t, "/appdata", `
+[storage]
+STORAGE_TYPE = local
+`, []testLocalStoragePathCase{
+		{loadAttachmentFrom, &Attachment.Storage, "/appdata/attachments"},
+		{loadLFSFrom, &LFS.Storage, "/appdata/lfs"},
+		{loadActionsFrom, &Actions.ArtifactStorage, "/appdata/actions_artifacts"},
+		{loadPackagesFrom, &Packages.Storage, "/appdata/packages"},
+		{loadRepoArchiveFrom, &RepoArchive.Storage, "/appdata/repo-archive"},
+		{loadActionsFrom, &Actions.LogStorage, "/appdata/actions_log"},
+		{loadAvatarsFrom, &Avatar.Storage, "/appdata/avatars"},
+		{loadRepoAvatarFrom, &RepoAvatar.Storage, "/appdata/repo-avatars"},
+	})
+}
+
+func Test_getStorageInheritStorageTypeLocalPath(t *testing.T) {
+	testLocalStoragePath(t, "/appdata", `
+[storage]
+STORAGE_TYPE = local
+PATH = /data/gitea
+`, []testLocalStoragePathCase{
+		{loadAttachmentFrom, &Attachment.Storage, "/data/gitea/attachments"},
+		{loadLFSFrom, &LFS.Storage, "/data/gitea/lfs"},
+		{loadActionsFrom, &Actions.ArtifactStorage, "/data/gitea/actions_artifacts"},
+		{loadPackagesFrom, &Packages.Storage, "/data/gitea/packages"},
+		{loadRepoArchiveFrom, &RepoArchive.Storage, "/data/gitea/repo-archive"},
+		{loadActionsFrom, &Actions.LogStorage, "/data/gitea/actions_log"},
+		{loadAvatarsFrom, &Avatar.Storage, "/data/gitea/avatars"},
+		{loadRepoAvatarFrom, &RepoAvatar.Storage, "/data/gitea/repo-avatars"},
+	})
+}
+
+func Test_getStorageInheritStorageTypeLocalRelativePath(t *testing.T) {
+	testLocalStoragePath(t, "/appdata", `
+[storage]
+STORAGE_TYPE = local
+PATH = storages
+`, []testLocalStoragePathCase{
+		{loadAttachmentFrom, &Attachment.Storage, "/appdata/storages/attachments"},
+		{loadLFSFrom, &LFS.Storage, "/appdata/storages/lfs"},
+		{loadActionsFrom, &Actions.ArtifactStorage, "/appdata/storages/actions_artifacts"},
+		{loadPackagesFrom, &Packages.Storage, "/appdata/storages/packages"},
+		{loadRepoArchiveFrom, &RepoArchive.Storage, "/appdata/storages/repo-archive"},
+		{loadActionsFrom, &Actions.LogStorage, "/appdata/storages/actions_log"},
+		{loadAvatarsFrom, &Avatar.Storage, "/appdata/storages/avatars"},
+		{loadRepoAvatarFrom, &RepoAvatar.Storage, "/appdata/storages/repo-avatars"},
+	})
+}
+
+func Test_getStorageInheritStorageTypeLocalPathOverride(t *testing.T) {
+	testLocalStoragePath(t, "/appdata", `
+[storage]
+STORAGE_TYPE = local
+PATH = /data/gitea
+
+[repo-archive]
+PATH = /data/gitea/the-archives-dir
+`, []testLocalStoragePathCase{
+		{loadAttachmentFrom, &Attachment.Storage, "/data/gitea/attachments"},
+		{loadLFSFrom, &LFS.Storage, "/data/gitea/lfs"},
+		{loadActionsFrom, &Actions.ArtifactStorage, "/data/gitea/actions_artifacts"},
+		{loadPackagesFrom, &Packages.Storage, "/data/gitea/packages"},
+		{loadRepoArchiveFrom, &RepoArchive.Storage, "/data/gitea/the-archives-dir"},
+		{loadActionsFrom, &Actions.LogStorage, "/data/gitea/actions_log"},
+		{loadAvatarsFrom, &Avatar.Storage, "/data/gitea/avatars"},
+		{loadRepoAvatarFrom, &RepoAvatar.Storage, "/data/gitea/repo-avatars"},
+	})
+}
+
+func Test_getStorageInheritStorageTypeLocalPathOverrideEmpty(t *testing.T) {
+	testLocalStoragePath(t, "/appdata", `
+[storage]
+STORAGE_TYPE = local
+PATH = /data/gitea
+
+[repo-archive]
+`, []testLocalStoragePathCase{
+		{loadAttachmentFrom, &Attachment.Storage, "/data/gitea/attachments"},
+		{loadLFSFrom, &LFS.Storage, "/data/gitea/lfs"},
+		{loadActionsFrom, &Actions.ArtifactStorage, "/data/gitea/actions_artifacts"},
+		{loadPackagesFrom, &Packages.Storage, "/data/gitea/packages"},
+		{loadRepoArchiveFrom, &RepoArchive.Storage, "/data/gitea/repo-archive"},
+		{loadActionsFrom, &Actions.LogStorage, "/data/gitea/actions_log"},
+		{loadAvatarsFrom, &Avatar.Storage, "/data/gitea/avatars"},
+		{loadRepoAvatarFrom, &RepoAvatar.Storage, "/data/gitea/repo-avatars"},
+	})
+}
+
+func Test_getStorageInheritStorageTypeLocalRelativePathOverride(t *testing.T) {
+	testLocalStoragePath(t, "/appdata", `
+[storage]
+STORAGE_TYPE = local
+PATH = /data/gitea
+
+[repo-archive]
+PATH = the-archives-dir
+`, []testLocalStoragePathCase{
+		{loadAttachmentFrom, &Attachment.Storage, "/data/gitea/attachments"},
+		{loadLFSFrom, &LFS.Storage, "/data/gitea/lfs"},
+		{loadActionsFrom, &Actions.ArtifactStorage, "/data/gitea/actions_artifacts"},
+		{loadPackagesFrom, &Packages.Storage, "/data/gitea/packages"},
+		{loadRepoArchiveFrom, &RepoArchive.Storage, "/data/gitea/the-archives-dir"},
+		{loadActionsFrom, &Actions.LogStorage, "/data/gitea/actions_log"},
+		{loadAvatarsFrom, &Avatar.Storage, "/data/gitea/avatars"},
+		{loadRepoAvatarFrom, &RepoAvatar.Storage, "/data/gitea/repo-avatars"},
+	})
+}
+
+func Test_getStorageInheritStorageTypeLocalPathOverride3(t *testing.T) {
+	testLocalStoragePath(t, "/appdata", `
+[storage.repo-archive]
+STORAGE_TYPE = local
+PATH = /data/gitea/archives
+`, []testLocalStoragePathCase{
+		{loadAttachmentFrom, &Attachment.Storage, "/appdata/attachments"},
+		{loadLFSFrom, &LFS.Storage, "/appdata/lfs"},
+		{loadActionsFrom, &Actions.ArtifactStorage, "/appdata/actions_artifacts"},
+		{loadPackagesFrom, &Packages.Storage, "/appdata/packages"},
+		{loadRepoArchiveFrom, &RepoArchive.Storage, "/data/gitea/archives"},
+		{loadActionsFrom, &Actions.LogStorage, "/appdata/actions_log"},
+		{loadAvatarsFrom, &Avatar.Storage, "/appdata/avatars"},
+		{loadRepoAvatarFrom, &RepoAvatar.Storage, "/appdata/repo-avatars"},
+	})
+}
+
+func Test_getStorageInheritStorageTypeLocalPathOverride3_5(t *testing.T) {
+	testLocalStoragePath(t, "/appdata", `
+[storage.repo-archive]
+STORAGE_TYPE = local
+PATH = a-relative-path
+`, []testLocalStoragePathCase{
+		{loadAttachmentFrom, &Attachment.Storage, "/appdata/attachments"},
+		{loadLFSFrom, &LFS.Storage, "/appdata/lfs"},
+		{loadActionsFrom, &Actions.ArtifactStorage, "/appdata/actions_artifacts"},
+		{loadPackagesFrom, &Packages.Storage, "/appdata/packages"},
+		{loadRepoArchiveFrom, &RepoArchive.Storage, "/appdata/a-relative-path"},
+		{loadActionsFrom, &Actions.LogStorage, "/appdata/actions_log"},
+		{loadAvatarsFrom, &Avatar.Storage, "/appdata/avatars"},
+		{loadRepoAvatarFrom, &RepoAvatar.Storage, "/appdata/repo-avatars"},
+	})
+}
+
+func Test_getStorageInheritStorageTypeLocalPathOverride4(t *testing.T) {
+	testLocalStoragePath(t, "/appdata", `
+[storage.repo-archive]
+STORAGE_TYPE = local
+PATH = /data/gitea/archives
+
+[repo-archive]
+PATH = /tmp/gitea/archives
+`, []testLocalStoragePathCase{
+		{loadAttachmentFrom, &Attachment.Storage, "/appdata/attachments"},
+		{loadLFSFrom, &LFS.Storage, "/appdata/lfs"},
+		{loadActionsFrom, &Actions.ArtifactStorage, "/appdata/actions_artifacts"},
+		{loadPackagesFrom, &Packages.Storage, "/appdata/packages"},
+		{loadRepoArchiveFrom, &RepoArchive.Storage, "/tmp/gitea/archives"},
+		{loadActionsFrom, &Actions.LogStorage, "/appdata/actions_log"},
+		{loadAvatarsFrom, &Avatar.Storage, "/appdata/avatars"},
+		{loadRepoAvatarFrom, &RepoAvatar.Storage, "/appdata/repo-avatars"},
+	})
+}
+
+func Test_getStorageInheritStorageTypeLocalPathOverride5(t *testing.T) {
+	testLocalStoragePath(t, "/appdata", `
+[storage.repo-archive]
+STORAGE_TYPE = local
+PATH = /data/gitea/archives
+
+[repo-archive]
+`, []testLocalStoragePathCase{
+		{loadAttachmentFrom, &Attachment.Storage, "/appdata/attachments"},
+		{loadLFSFrom, &LFS.Storage, "/appdata/lfs"},
+		{loadActionsFrom, &Actions.ArtifactStorage, "/appdata/actions_artifacts"},
+		{loadPackagesFrom, &Packages.Storage, "/appdata/packages"},
+		{loadRepoArchiveFrom, &RepoArchive.Storage, "/data/gitea/archives"},
+		{loadActionsFrom, &Actions.LogStorage, "/appdata/actions_log"},
+		{loadAvatarsFrom, &Avatar.Storage, "/appdata/avatars"},
+		{loadRepoAvatarFrom, &RepoAvatar.Storage, "/appdata/repo-avatars"},
+	})
+}
+
+func Test_getStorageInheritStorageTypeLocalPathOverride72(t *testing.T) {
+	testLocalStoragePath(t, "/appdata", `
+[repo-archive]
+STORAGE_TYPE = local
+PATH = archives
+`, []testLocalStoragePathCase{
+		{loadRepoArchiveFrom, &RepoArchive.Storage, "/appdata/archives"},
+	})
+}
+
+func Test_getStorageConfiguration20(t *testing.T) {
+	cfg, err := NewConfigProviderFromData(`
+[repo-archive]
+STORAGE_TYPE = my_storage
+PATH = archives
+`)
+	assert.NoError(t, err)
+
+	assert.Error(t, loadRepoArchiveFrom(cfg))
+}
+
+func Test_getStorageConfiguration21(t *testing.T) {
+	testLocalStoragePath(t, "/appdata", `
+[storage.repo-archive]
+`, []testLocalStoragePathCase{
+		{loadRepoArchiveFrom, &RepoArchive.Storage, "/appdata/repo-archive"},
+	})
+}
+
+func Test_getStorageConfiguration22(t *testing.T) {
+	testLocalStoragePath(t, "/appdata", `
+[storage.repo-archive]
+PATH = archives
+`, []testLocalStoragePathCase{
+		{loadRepoArchiveFrom, &RepoArchive.Storage, "/appdata/archives"},
+	})
+}
+
+func Test_getStorageConfiguration23(t *testing.T) {
+	cfg, err := NewConfigProviderFromData(`
+[repo-archive]
+STORAGE_TYPE = minio
+MINIO_ACCESS_KEY_ID = my_access_key
+MINIO_SECRET_ACCESS_KEY = my_secret_key
+`)
+	assert.NoError(t, err)
+
+	_, err = getStorage(cfg, "", "", nil)
+	assert.Error(t, err)
+
+	assert.NoError(t, loadRepoArchiveFrom(cfg))
+	cp := RepoArchive.Storage.ToShadowCopy()
+	assert.EqualValues(t, "******", cp.MinioConfig.AccessKeyID)
+	assert.EqualValues(t, "******", cp.MinioConfig.SecretAccessKey)
+}
+
+func Test_getStorageConfiguration24(t *testing.T) {
+	cfg, err := NewConfigProviderFromData(`
+[repo-archive]
+STORAGE_TYPE = my_archive
+
+[storage.my_archive]
+; unsupported, storage type should be defined explicitly
+PATH = archives
+`)
+	assert.NoError(t, err)
+	assert.Error(t, loadRepoArchiveFrom(cfg))
+}
+
+func Test_getStorageConfiguration25(t *testing.T) {
+	cfg, err := NewConfigProviderFromData(`
+[repo-archive]
+STORAGE_TYPE = my_archive
+
+[storage.my_archive]
+; unsupported, storage type should be known type
+STORAGE_TYPE = unknown // should be local or minio
+PATH = archives
+`)
+	assert.NoError(t, err)
+	assert.Error(t, loadRepoArchiveFrom(cfg))
+}
+
+func Test_getStorageConfiguration26(t *testing.T) {
+	cfg, err := NewConfigProviderFromData(`
+[repo-archive]
+STORAGE_TYPE = minio
+MINIO_ACCESS_KEY_ID = my_access_key
+MINIO_SECRET_ACCESS_KEY = my_secret_key
+; wrong configuration
+MINIO_USE_SSL = abc
+`)
+	assert.NoError(t, err)
+	// assert.Error(t, loadRepoArchiveFrom(cfg))
+	// FIXME: this should return error but now ini package's MapTo() doesn't check type
+	assert.NoError(t, loadRepoArchiveFrom(cfg))
+}
+
+func Test_getStorageConfiguration27(t *testing.T) {
+	cfg, err := NewConfigProviderFromData(`
+[storage.repo-archive]
+STORAGE_TYPE = minio
+MINIO_ACCESS_KEY_ID = my_access_key
+MINIO_SECRET_ACCESS_KEY = my_secret_key
+MINIO_USE_SSL = true
+`)
+	assert.NoError(t, err)
+	assert.NoError(t, loadRepoArchiveFrom(cfg))
+	assert.EqualValues(t, "my_access_key", RepoArchive.Storage.MinioConfig.AccessKeyID)
+	assert.EqualValues(t, "my_secret_key", RepoArchive.Storage.MinioConfig.SecretAccessKey)
+	assert.EqualValues(t, true, RepoArchive.Storage.MinioConfig.UseSSL)
+	assert.EqualValues(t, "repo-archive/", RepoArchive.Storage.MinioConfig.BasePath)
+}
@@ -17,6 +17,7 @@ import (
	"os"
	"os/exec"
	"path/filepath"
+	"reflect"
	"strconv"
	"strings"
	"sync"

@@ -164,6 +165,10 @@ func sessionHandler(session ssh.Session) {
}

func publicKeyHandler(ctx ssh.Context, key ssh.PublicKey) bool {
+	// FIXME: the "ssh.Context" is not thread-safe, so db operations should use the immutable parent "Context"
+	// TODO: Remove after https://github.com/gliderlabs/ssh/pull/211
+	parentCtx := reflect.ValueOf(ctx).Elem().FieldByName("Context").Interface().(context.Context)
+
	if log.IsDebug() { // <- FingerprintSHA256 is kinda expensive so only calculate it if necessary
		log.Debug("Handle Public Key: Fingerprint: %s from %s", gossh.FingerprintSHA256(key), ctx.RemoteAddr())
	}

@@ -189,7 +194,7 @@ func publicKeyHandler(ctx ssh.Context, key ssh.PublicKey) bool {
		// look for the exact principal
	principalLoop:
		for _, principal := range cert.ValidPrincipals {
-			pkey, err := asymkey_model.SearchPublicKeyByContentExact(ctx, principal)
+			pkey, err := asymkey_model.SearchPublicKeyByContentExact(parentCtx, principal)
			if err != nil {
				if asymkey_model.IsErrKeyNotExist(err) {
					log.Debug("Principal Rejected: %s Unknown Principal: %s", ctx.RemoteAddr(), principal)

@@ -246,7 +251,7 @@ func publicKeyHandler(ctx ssh.Context, key ssh.PublicKey) bool {
		log.Debug("Handle Public Key: %s Fingerprint: %s is not a certificate", ctx.RemoteAddr(), gossh.FingerprintSHA256(key))
	}

-	pkey, err := asymkey_model.SearchPublicKeyByContent(ctx, strings.TrimSpace(string(gossh.MarshalAuthorizedKey(key))))
+	pkey, err := asymkey_model.SearchPublicKeyByContent(parentCtx, strings.TrimSpace(string(gossh.MarshalAuthorizedKey(key))))
	if err != nil {
		if asymkey_model.IsErrKeyNotExist(err) {
			log.Warn("Unknown public key: %s from %s", gossh.FingerprintSHA256(key), ctx.RemoteAddr())
@@ -84,17 +84,22 @@ func NewMinioStorage(ctx context.Context, cfg *setting.Storage) (ObjectStorage,
		Creds:     credentials.NewStaticV4(config.AccessKeyID, config.SecretAccessKey, ""),
		Secure:    config.UseSSL,
		Transport: &http.Transport{TLSClientConfig: &tls.Config{InsecureSkipVerify: config.InsecureSkipVerify}},
+		Region:    config.Location,
	})
	if err != nil {
		return nil, convertMinioErr(err)
	}

+	// Check to see if we already own this bucket
+	exists, errBucketExists := minioClient.BucketExists(ctx, config.Bucket)
+	if errBucketExists != nil {
+		return nil, convertMinioErr(err)
+	}
+
+	if !exists {
		if err := minioClient.MakeBucket(ctx, config.Bucket, minio.MakeBucketOptions{
			Region: config.Location,
		}); err != nil {
-			// Check to see if we already own this bucket (which happens if you run this twice)
-			exists, errBucketExists := minioClient.BucketExists(ctx, config.Bucket)
-			if !exists || errBucketExists != nil {
			return nil, convertMinioErr(err)
		}
	}
@@ -17,6 +17,12 @@ func IsNormalPageCompleted(s string) bool {
	return strings.Contains(s, `<footer class="page-footer"`) && strings.Contains(s, `</html>`)
}

+func MockVariableValue[T any](p *T, v T) (reset func()) {
+	old := *p
+	*p = v
+	return func() { *p = old }
+}
+
func MockVariable[T any](variable *T, mock T) func() {
	original := *variable
	*variable = mock
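For context, the `MockVariableValue` helper added above overrides a package-level variable and hands back a reset closure, so a test can restore the original value with a single `defer`. A minimal usage sketch, assuming the helper sits in the `modules/test` package alongside `IsNormalPageCompleted`; the `setting.AppName` target is only an illustrative choice, not part of this change:

package example_test

import (
	"testing"

	"code.gitea.io/gitea/modules/setting"
	"code.gitea.io/gitea/modules/test"

	"github.com/stretchr/testify/assert"
)

func TestWithMockedVariable(t *testing.T) {
	// Swap the variable for the duration of the test; the returned closure
	// restores the previous value when the deferred call runs.
	defer test.MockVariableValue(&setting.AppName, "app under test")()

	assert.Equal(t, "app under test", setting.AppName)
}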
@@ -71,7 +71,7 @@ func (ct SniffedType) IsRepresentableAsText() bool {
	return ct.IsText() || ct.IsSvgImage()
}

-// IsBrowsableType returns whether a non-text type can be displayed in a browser
+// IsBrowsableBinaryType returns whether a non-text type can be displayed in a browser
func (ct SniffedType) IsBrowsableBinaryType() bool {
	return ct.IsImage() || ct.IsSvgImage() || ct.IsPDF() || ct.IsVideo() || ct.IsAudio()
}

@@ -116,6 +116,17 @@ func DetectContentType(data []byte) SniffedType {
		}
	}

+	if ct == "application/ogg" {
+		dataHead := data
+		if len(dataHead) > 256 {
+			dataHead = dataHead[:256] // only need to do a quick check for the file header
+		}
+		if bytes.Contains(dataHead, []byte("theora")) || bytes.Contains(dataHead, []byte("dirac")) {
+			ct = "video/ogg" // ogg is only used for some video formats, and it's not popular
+		} else {
+			ct = "audio/ogg" // for most cases, it is used as an audio container
+		}
+	}
	return SniffedType{ct}
}
@@ -6,6 +6,7 @@ package typesniffer
import (
	"bytes"
	"encoding/base64"
+	"encoding/hex"
	"strings"
	"testing"

@@ -121,3 +122,15 @@ func TestDetectContentTypeFromReader(t *testing.T) {
	assert.NoError(t, err)
	assert.True(t, st.IsAudio())
}
+
+func TestDetectContentTypeOgg(t *testing.T) {
+	oggAudio, _ := hex.DecodeString("4f67675300020000000000000000352f0000000000007dc39163011e01766f72626973000000000244ac0000000000000071020000000000b8014f6767530000")
+	st, err := DetectContentTypeFromReader(bytes.NewReader(oggAudio))
+	assert.NoError(t, err)
+	assert.True(t, st.IsAudio())
+
+	oggVideo, _ := hex.DecodeString("4f676753000200000000000000007d9747ef000000009b59daf3012a807468656f7261030201001e00110001e000010e00020000001e00000001000001000001")
+	st, err = DetectContentTypeFromReader(bytes.NewReader(oggVideo))
+	assert.NoError(t, err)
+	assert.True(t, st.IsVideo())
+}
@@ -2300,6 +2300,7 @@ settings.tags.protection.none = There are no protected tags.
settings.tags.protection.pattern.description = You can use a single name or a glob pattern or regular expression to match multiple tags. Read more in the <a target="_blank" rel="noopener" href="https://docs.gitea.io/en-us/protected-tags/">protected tags guide</a>.
settings.bot_token = Bot Token
settings.chat_id = Chat ID
+settings.thread_id = Thread ID
settings.matrix.homeserver_url = Homeserver URL
settings.matrix.room_id = Room ID
settings.matrix.message_type = Message Type

@@ -2500,7 +2501,7 @@ tag.create_success = Tag "%s" has been created.
topic.manage_topics = Manage Topics
topic.done = Done
topic.count_prompt = You cannot select more than 25 topics
-topic.format_prompt = Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
+topic.format_prompt = Topics must start with a letter or number, can include dashes ('-') and dots ('.'), can be up to 35 characters long. Letters must be lowercase.

find_file.go_to_file = Go to file
find_file.no_matching = No matching file found

@@ -3491,3 +3492,12 @@ need_approval_desc = Need approval to run workflows for fork pull request.
type-1.display_name = Individual Project
type-2.display_name = Repository Project
type-3.display_name = Organization Project
+
+[git.filemode]
+changed_filemode = %[1]s → %[2]s
+# Ordered by git filemode value, ascending. E.g. directory has "040000", normal file has "100644", …
+directory = Directory
+normal_file = Normal file
+executable_file = Executable file
+symbolic_link = Symbolic link
+submodule = Submodule
@@ -9,6 +9,9 @@ import (

	packages_model "code.gitea.io/gitea/models/packages"
	nuget_module "code.gitea.io/gitea/modules/packages/nuget"
+
+	"golang.org/x/text/collate"
+	"golang.org/x/text/language"
)

// https://docs.microsoft.com/en-us/nuget/api/service-index#resources

@@ -207,9 +210,15 @@ func createSearchResultResponse(l *linkBuilder, totalHits int64, pds []*packages
		grouped[pd.Package.Name] = append(grouped[pd.Package.Name], pd)
	}

+	keys := make([]string, 0, len(grouped))
+	for key := range grouped {
+		keys = append(keys, key)
+	}
+	collate.New(language.English, collate.IgnoreCase).SortStrings(keys)
+
	data := make([]*SearchResult, 0, len(pds))
-	for _, group := range grouped {
-		data = append(data, createSearchResult(l, group))
+	for _, key := range keys {
+		data = append(data, createSearchResult(l, grouped[key]))
	}

	return &SearchResultResponse{
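Because Go randomizes map iteration order, collecting the grouped keys and sorting them is what makes the order of the NuGet search response deterministic. A standalone sketch of the collation call used above, with made-up package names, showing the case-insensitive ordering it produces:

package main

import (
	"fmt"

	"golang.org/x/text/collate"
	"golang.org/x/text/language"
)

func main() {
	// Keys as they might come out of the grouped map, in arbitrary order.
	keys := []string{"newtonsoft.json", "Abc", "abc.extensions", "Zlib"}

	// Case-insensitive, locale-aware sort: the same collator the handler builds.
	collate.New(language.English, collate.IgnoreCase).SortStrings(keys)

	fmt.Println(keys) // prints: [Abc abc.extensions newtonsoft.json Zlib]
}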
@@ -16,6 +16,7 @@ import (

	"code.gitea.io/gitea/models/db"
	packages_model "code.gitea.io/gitea/models/packages"
+	nuget_model "code.gitea.io/gitea/models/packages/nuget"
	"code.gitea.io/gitea/modules/context"
	"code.gitea.io/gitea/modules/log"
	packages_module "code.gitea.io/gitea/modules/packages"

@@ -115,7 +116,7 @@ func SearchServiceV2(ctx *context.Context) {
	skip, take := ctx.FormInt("$skip"), ctx.FormInt("$top")
	paginator := db.NewAbsoluteListOptions(skip, take)

-	pvs, total, err := packages_model.SearchVersions(ctx, &packages_model.PackageSearchOptions{
+	pvs, total, err := packages_model.SearchLatestVersions(ctx, &packages_model.PackageSearchOptions{
		OwnerID: ctx.Package.Owner.ID,
		Type:    packages_model.TypeNuGet,
		Name: packages_model.SearchValue{

@@ -166,9 +167,8 @@ func SearchServiceV2(ctx *context.Context) {

// http://docs.oasis-open.org/odata/odata/v4.0/errata03/os/complete/part2-url-conventions/odata-v4.0-errata03-os-part2-url-conventions-complete.html#_Toc453752351
func SearchServiceV2Count(ctx *context.Context) {
-	count, err := packages_model.CountVersions(ctx, &packages_model.PackageSearchOptions{
+	count, err := nuget_model.CountPackages(ctx, &packages_model.PackageSearchOptions{
		OwnerID: ctx.Package.Owner.ID,
-		Type:    packages_model.TypeNuGet,
		Name: packages_model.SearchValue{
			Value: getSearchTerm(ctx),
		},

@@ -184,9 +184,8 @@ func SearchServiceV2Count(ctx *context.Context) {

// https://docs.microsoft.com/en-us/nuget/api/search-query-service-resource#search-for-packages
func SearchServiceV3(ctx *context.Context) {
-	pvs, count, err := packages_model.SearchVersions(ctx, &packages_model.PackageSearchOptions{
+	pvs, count, err := nuget_model.SearchVersions(ctx, &packages_model.PackageSearchOptions{
		OwnerID: ctx.Package.Owner.ID,
-		Type:    packages_model.TypeNuGet,
		Name:       packages_model.SearchValue{Value: ctx.FormTrim("q")},
		IsInternal: util.OptionalBoolFalse,
		Paginator: db.NewAbsoluteListOptions(
@@ -127,7 +127,7 @@ func EditWikiPage(ctx *context.APIContext) {

	form := web.GetForm(ctx).(*api.CreateWikiPageOptions)

-	oldWikiName := wiki_service.WebPathFromRequest(ctx.Params(":pageName"))
+	oldWikiName := wiki_service.WebPathFromRequest(ctx.PathParamRaw(":pageName"))
	newWikiName := wiki_service.UserTitleToWebPath("", form.Title)

	if len(newWikiName) == 0 {

@@ -231,7 +231,7 @@ func DeleteWikiPage(ctx *context.APIContext) {
	//   "404":
	//     "$ref": "#/responses/notFound"

-	wikiName := wiki_service.WebPathFromRequest(ctx.Params(":pageName"))
+	wikiName := wiki_service.WebPathFromRequest(ctx.PathParamRaw(":pageName"))

	if err := wiki_service.DeleteWikiPage(ctx, ctx.Doer, ctx.Repo.Repository, wikiName); err != nil {
		if err.Error() == "file does not exist" {

@@ -359,7 +359,7 @@ func GetWikiPage(ctx *context.APIContext) {
	//     "$ref": "#/responses/notFound"

	// get requested pagename
-	pageName := wiki_service.WebPathFromRequest(ctx.Params(":pageName"))
+	pageName := wiki_service.WebPathFromRequest(ctx.PathParamRaw(":pageName"))

	wikiPage := getWikiPage(ctx, pageName)
	if !ctx.Written() {

@@ -409,7 +409,7 @@ func ListPageRevisions(ctx *context.APIContext) {
	}

	// get requested pagename
-	pageName := wiki_service.WebPathFromRequest(ctx.Params(":pageName"))
+	pageName := wiki_service.WebPathFromRequest(ctx.PathParamRaw(":pageName"))
	if len(pageName) == 0 {
		pageName = "Home"
	}
@@ -16,7 +16,7 @@ func Queues(ctx *context.Context) {
	if !setting.IsProd {
		initTestQueueOnce()
	}
-	ctx.Data["Title"] = ctx.Tr("admin.monitor.queue")
+	ctx.Data["Title"] = ctx.Tr("admin.monitor.queues")
	ctx.Data["PageIsAdminMonitorQueue"] = true
	ctx.Data["Queues"] = queue.GetManager().ManagedQueues()
	ctx.HTML(http.StatusOK, tplQueue)
@@ -339,7 +339,7 @@ func Diff(ctx *context.Context) {
	ctx.Data["Commit"] = commit
	ctx.Data["Diff"] = diff

-	statuses, _, err := git_model.GetLatestCommitStatus(ctx, ctx.Repo.Repository.ID, commitID, db.ListOptions{})
+	statuses, _, err := git_model.GetLatestCommitStatus(ctx, ctx.Repo.Repository.ID, commitID, db.ListOptions{ListAll: true})
	if err != nil {
		log.Error("GetLatestCommitStatus: %v", err)
	}
@@ -370,8 +370,10 @@ func editFilePost(ctx *context.Context, form forms.EditRepoFileForm, isNewFile b
	}

	if ctx.Repo.Repository.IsEmpty {
+		if isEmpty, err := ctx.Repo.GitRepo.IsEmpty(); err == nil && !isEmpty {
			_ = repo_model.UpdateRepositoryCols(ctx, &repo_model.Repository{ID: ctx.Repo.Repository.ID, IsEmpty: false}, "is_empty")
		}
+	}

	redirectForCommitChoice(ctx, form.CommitChoice, branchName, form.TreePath)
}

@@ -763,8 +765,10 @@ func UploadFilePost(ctx *context.Context) {
	}

	if ctx.Repo.Repository.IsEmpty {
+		if isEmpty, err := ctx.Repo.GitRepo.IsEmpty(); err == nil && !isEmpty {
			_ = repo_model.UpdateRepositoryCols(ctx, &repo_model.Repository{ID: ctx.Repo.Repository.ID, IsEmpty: false}, "is_empty")
		}
+	}

	redirectForCommitChoice(ctx, form.CommitChoice, branchName, form.TreePath)
}
@@ -421,7 +421,7 @@ func PrepareMergedViewPullInfo(ctx *context.Context, issue *issues_model.Issue)

	if len(compareInfo.Commits) != 0 {
		sha := compareInfo.Commits[0].ID.String()
-		commitStatuses, _, err := git_model.GetLatestCommitStatus(ctx, ctx.Repo.Repository.ID, sha, db.ListOptions{})
+		commitStatuses, _, err := git_model.GetLatestCommitStatus(ctx, ctx.Repo.Repository.ID, sha, db.ListOptions{ListAll: true})
		if err != nil {
			ctx.ServerError("GetLatestCommitStatus", err)
			return nil

@@ -483,7 +483,7 @@ func PrepareViewPullInfo(ctx *context.Context, issue *issues_model.Issue) *git.C
			ctx.ServerError(fmt.Sprintf("GetRefCommitID(%s)", pull.GetGitRefName()), err)
			return nil
		}
-		commitStatuses, _, err := git_model.GetLatestCommitStatus(ctx, repo.ID, sha, db.ListOptions{})
+		commitStatuses, _, err := git_model.GetLatestCommitStatus(ctx, repo.ID, sha, db.ListOptions{ListAll: true})
		if err != nil {
			ctx.ServerError("GetLatestCommitStatus", err)
			return nil

@@ -575,7 +575,7 @@ func PrepareViewPullInfo(ctx *context.Context, issue *issues_model.Issue) *git.C
		return nil
	}

-	commitStatuses, _, err := git_model.GetLatestCommitStatus(ctx, repo.ID, sha, db.ListOptions{})
+	commitStatuses, _, err := git_model.GetLatestCommitStatus(ctx, repo.ID, sha, db.ListOptions{ListAll: true})
	if err != nil {
		ctx.ServerError("GetLatestCommitStatus", err)
		return nil
|
@ -834,7 +834,7 @@ func renderDirectoryFiles(ctx *context.Context, timeout time.Duration) git.Entri
|
||||||
ctx.Data["LatestCommitVerification"] = verification
|
ctx.Data["LatestCommitVerification"] = verification
|
||||||
ctx.Data["LatestCommitUser"] = user_model.ValidateCommitWithEmail(ctx, latestCommit)
|
ctx.Data["LatestCommitUser"] = user_model.ValidateCommitWithEmail(ctx, latestCommit)
|
||||||
|
|
||||||
statuses, _, err := git_model.GetLatestCommitStatus(ctx, ctx.Repo.Repository.ID, latestCommit.ID.String(), db.ListOptions{})
|
statuses, _, err := git_model.GetLatestCommitStatus(ctx, ctx.Repo.Repository.ID, latestCommit.ID.String(), db.ListOptions{ListAll: true})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Error("GetLatestCommitStatus: %v", err)
|
log.Error("GetLatestCommitStatus: %v", err)
|
||||||
}
|
}
|
||||||
|
|
|
@@ -453,12 +453,13 @@ func telegramHookParams(ctx *context.Context) webhookParams {

	return webhookParams{
		Type:        webhook_module.TELEGRAM,
-		URL:         fmt.Sprintf("https://api.telegram.org/bot%s/sendMessage?chat_id=%s", url.PathEscape(form.BotToken), url.QueryEscape(form.ChatID)),
+		URL:         fmt.Sprintf("https://api.telegram.org/bot%s/sendMessage?chat_id=%s&message_thread_id=%s", url.PathEscape(form.BotToken), url.QueryEscape(form.ChatID), url.QueryEscape(form.ThreadID)),
		ContentType: webhook.ContentTypeJSON,
		WebhookForm: form.WebhookForm,
		Meta: &webhook_service.TelegramMeta{
			BotToken: form.BotToken,
			ChatID:   form.ChatID,
+			ThreadID: form.ThreadID,
		},
	}
}
@@ -186,7 +186,7 @@ func renderViewPage(ctx *context.Context) (*git.Repository, *git.TreeEntry) {
	ctx.Data["Pages"] = pages

	// get requested page name
-	pageName := wiki_service.WebPathFromRequest(ctx.Params("*"))
+	pageName := wiki_service.WebPathFromRequest(ctx.PathParamRaw("*"))
	if len(pageName) == 0 {
		pageName = "Home"
	}

@@ -333,7 +333,7 @@ func renderRevisionPage(ctx *context.Context) (*git.Repository, *git.TreeEntry)
	}

	// get requested pagename
-	pageName := wiki_service.WebPathFromRequest(ctx.Params("*"))
+	pageName := wiki_service.WebPathFromRequest(ctx.PathParamRaw("*"))
	if len(pageName) == 0 {
		pageName = "Home"
	}

@@ -416,7 +416,7 @@ func renderEditPage(ctx *context.Context) {
	}()

	// get requested pagename
-	pageName := wiki_service.WebPathFromRequest(ctx.Params("*"))
+	pageName := wiki_service.WebPathFromRequest(ctx.PathParamRaw("*"))
	if len(pageName) == 0 {
		pageName = "Home"
	}

@@ -648,7 +648,7 @@ func WikiRaw(ctx *context.Context) {
		return
	}

-	providedWebPath := wiki_service.WebPathFromRequest(ctx.Params("*"))
+	providedWebPath := wiki_service.WebPathFromRequest(ctx.PathParamRaw("*"))
	providedGitPath := wiki_service.WebPathToGitPath(providedWebPath)
	var entry *git.TreeEntry
	if commit != nil {

@@ -760,7 +760,7 @@ func EditWikiPost(ctx *context.Context) {
		return
	}

-	oldWikiName := wiki_service.WebPathFromRequest(ctx.Params("*"))
+	oldWikiName := wiki_service.WebPathFromRequest(ctx.PathParamRaw("*"))
	newWikiName := wiki_service.UserTitleToWebPath("", form.Title)

	if len(form.Message) == 0 {

@@ -779,7 +779,7 @@ func EditWikiPost(ctx *context.Context) {

// DeleteWikiPagePost delete wiki page
func DeleteWikiPagePost(ctx *context.Context) {
-	wikiName := wiki_service.WebPathFromRequest(ctx.Params("*"))
+	wikiName := wiki_service.WebPathFromRequest(ctx.PathParamRaw("*"))
	if len(wikiName) == 0 {
		wikiName = "Home"
	}
@@ -53,7 +53,7 @@ func RunnersList(ctx *context.Context, opts actions_model.FindRunnerOptions) {
	ctx.Data["Runners"] = runners
	ctx.Data["Total"] = count
	ctx.Data["RegistrationToken"] = token.Token
-	ctx.Data["RunnerOnwerID"] = opts.OwnerID
+	ctx.Data["RunnerOwnerID"] = opts.OwnerID
	ctx.Data["RunnerRepoID"] = opts.RepoID

	pager := context.NewPagination(int(count), opts.PageSize, opts.Page, 5)
@@ -695,7 +695,7 @@ func getRepoIDs(reposQuery string) []int64 {
		return []int64{}
	}
	if !issueReposQueryPattern.MatchString(reposQuery) {
-		log.Warn("issueReposQueryPattern does not match query")
+		log.Warn("issueReposQueryPattern does not match query: %q", reposQuery)
		return []int64{}
	}
@@ -120,6 +120,7 @@ func Profile(ctx *context.Context) {
			profileContent, err := markdown.RenderString(&markup.RenderContext{
				Ctx:     ctx,
				GitRepo: gitRepo,
+				Metas:   map[string]string{"mode": "document"},
			}, bytes)
			if err != nil {
				ctx.ServerError("RenderString", err)
@@ -302,8 +302,8 @@ func registerRoutes(m *web.Route) {
	}

	addWebhookEditRoutes := func() {
-		m.Post("/forgejo/{id}", web.Bind(forms.NewWebhookForm{}), repo.ForgejoHooksEditPost)
		m.Post("/gitea/{id}", web.Bind(forms.NewWebhookForm{}), repo.GiteaHooksEditPost)
+		m.Post("/forgejo/{id}", web.Bind(forms.NewWebhookForm{}), repo.ForgejoHooksEditPost)
		m.Post("/gogs/{id}", web.Bind(forms.NewGogshookForm{}), repo.GogsHooksEditPost)
		m.Post("/slack/{id}", web.Bind(forms.NewSlackHookForm{}), repo.SlackHooksEditPost)
		m.Post("/discord/{id}", web.Bind(forms.NewDiscordHookForm{}), repo.DiscordHooksEditPost)

@@ -1133,7 +1133,7 @@ func registerRoutes(m *web.Route) {
			m.Get(".atom", feedEnabled, repo.ReleasesFeedAtom)
		}, ctxDataSet("EnableFeed", setting.Other.EnableFeed),
			repo.MustBeNotEmpty, reqRepoReleaseReader, context.RepoRefByType(context.RepoRefTag, true))
-		m.Get("/releases/attachments/{uuid}", repo.GetAttachment, repo.MustBeNotEmpty, reqRepoReleaseReader)
+		m.Get("/releases/attachments/{uuid}", repo.MustBeNotEmpty, reqRepoReleaseReader, repo.GetAttachment)
		m.Group("/releases", func() {
			m.Get("/new", repo.NewRelease)
			m.Post("/new", web.Bind(forms.NewReleaseForm{}), repo.NewReleasePost)
@@ -75,7 +75,7 @@ func createCommitStatus(ctx context.Context, job *actions_model.ActionRunJob) er
	}
	ctxname := fmt.Sprintf("%s / %s (%s)", runName, job.Name, event)
	state := toCommitStatus(job.Status)
-	if statuses, _, err := git_model.GetLatestCommitStatus(ctx, repo.ID, sha, db.ListOptions{}); err == nil {
+	if statuses, _, err := git_model.GetLatestCommitStatus(ctx, repo.ID, sha, db.ListOptions{ListAll: true}); err == nil {
		for _, v := range statuses {
			if v.Context == ctxname {
				if v.State == state {
@@ -56,7 +56,7 @@ func (p *AuthSourceProvider) DisplayName() string {

func (p *AuthSourceProvider) IconHTML() template.HTML {
	if p.iconURL != "" {
-		img := fmt.Sprintf(`<img class="gt-mr-3" width="20" height="20" src="%s" alt="%s">`,
+		img := fmt.Sprintf(`<img class="gt-object-contain gt-mr-3" width="20" height="20" src="%s" alt="%s">`,
			html.EscapeString(p.iconURL), html.EscapeString(p.DisplayName()),
		)
		return template.HTML(img)
@@ -51,7 +51,7 @@ func toUser(ctx context.Context, user *user_model.User, signed, authed bool) *ap
		ID:        user.ID,
		UserName:  user.Name,
		FullName:  user.FullName,
-		Email:     user.GetEmail(),
+		Email:     user.GetPlaceholderEmail(),
		AvatarURL: user.AvatarLink(ctx),
		Created:   user.CreatedUnix.AsTime(),
		Restricted: user.IsRestricted,
@@ -96,6 +96,12 @@ func ListTasks() TaskTable {
			next = e.Next
			prev = e.Prev
		}

+		// If the manual run is after the cron run, use that instead.
+		if prev.Before(task.LastRun) {
+			prev = task.LastRun
+		}
+
		task.lock.Lock()
		tTable = append(tTable, &TaskTableRow{
			Name: task.Name,
@@ -8,6 +8,7 @@ import (
	"fmt"
	"reflect"
	"sync"
+	"time"

	"code.gitea.io/gitea/models/db"
	system_model "code.gitea.io/gitea/models/system"

@@ -36,6 +37,8 @@ type Task struct {
	LastMessage string
	LastDoer    string
	ExecTimes   int64
+	// This stores the time of the last manual run of this task.
+	LastRun time.Time
}

// DoRunAtStart returns if this task should run at the start

@@ -87,6 +90,12 @@ func (t *Task) RunWithUser(doer *user_model.User, config Config) {
		}
	}()
	graceful.GetManager().RunWithShutdownContext(func(baseCtx context.Context) {
+		// Store the time of this run, before the function is executed, so it
+		// matches the behavior of what the cron library does.
+		t.lock.Lock()
+		t.LastRun = time.Now()
+		t.lock.Unlock()
+
		pm := process.GetManager()
		doerName := ""
		if doer != nil && doer.ID != -1 {
18
services/forgejo/main_test.go
Normal file
@@ -0,0 +1,18 @@
// SPDX-License-Identifier: MIT

package forgejo

import (
	"path/filepath"
	"testing"

	"code.gitea.io/gitea/models/unittest"

	_ "code.gitea.io/gitea/models"
)

func TestMain(m *testing.M) {
	unittest.MainTest(m, &unittest.TestOptions{
		GiteaRootPath: filepath.Join("..", ".."),
	})
}
26
services/forgejo/sanity.go
Normal file
@@ -0,0 +1,26 @@
// SPDX-License-Identifier: MIT

package forgejo

import (
	"code.gitea.io/gitea/models/db"
	"code.gitea.io/gitea/modules/log"
	"code.gitea.io/gitea/modules/setting"
)

var (
	ForgejoV6DatabaseVersion = int64(261) // must be updated once v6 / Gitea v1.21 is out
	ForgejoV5DatabaseVersion = int64(260)
	ForgejoV4DatabaseVersion = int64(244)
)

var logFatal = log.Fatal

func fatal(err error) error {
	logFatal("%v", err)
	return err
}

func PreMigrationSanityChecks(e db.Engine, dbVersion int64, cfg setting.ConfigProvider) error {
	return v1TOv5_0_1Included(e, dbVersion, cfg)
}
31
services/forgejo/sanity_test.go
Normal file
@@ -0,0 +1,31 @@
// SPDX-License-Identifier: MIT

package forgejo

import (
	"os"
	"path/filepath"
	"testing"

	"code.gitea.io/gitea/models/db"
	"code.gitea.io/gitea/models/unittest"
	"code.gitea.io/gitea/modules/setting"

	"github.com/stretchr/testify/assert"
)

func TestForgejo_PreMigrationSanityChecks(t *testing.T) {
	assert.NoError(t, unittest.PrepareTestDatabase())
	ctx := db.DefaultContext
	e := db.GetEngine(ctx)

	assert.NoError(t, PreMigrationSanityChecks(e, ForgejoV4DatabaseVersion, configFixture(t, "")))
}

func configFixture(t *testing.T, content string) setting.ConfigProvider {
	config := filepath.Join(t.TempDir(), "app.ini")
	assert.NoError(t, os.WriteFile(config, []byte(content), 0o777))
	cfg, err := setting.NewConfigProviderFromFile(config)
	assert.NoError(t, err)
	return cfg
}
91
services/forgejo/sanity_v1TOv5_0_1Included.go
Normal file
91
services/forgejo/sanity_v1TOv5_0_1Included.go
Normal file
@ -0,0 +1,91 @@
// SPDX-License-Identifier: MIT

package forgejo

import (
	"fmt"
	"strings"

	"code.gitea.io/gitea/models/db"
	"code.gitea.io/gitea/models/forgejo/semver"
	"code.gitea.io/gitea/modules/setting"

	"github.com/hashicorp/go-version"
)

var v1TOv5_0_1IncludedStorageSections = []struct {
	section        string
	storageSection string
}{
	{"attachment", "storage.attachments"},
	{"lfs", "storage.lfs"},
	{"avatar", "storage.avatars"},
	{"repo-avatar", "storage.repo-avatars"},
	{"repo-archive", "storage.repo-archive"},
	{"packages", "storage.packages"},
	// the actions sections are not included here because they were experimental at the time
}

func v1TOv5_0_1Included(e db.Engine, dbVersion int64, cfg setting.ConfigProvider) error {
	//
	// When upgrading from Forgejo > v5 or Gitea > v1.20, no sanity check is necessary.
	//
	if dbVersion > ForgejoV5DatabaseVersion {
		return nil
	}

	//
	// When upgrading from a Forgejo point version >= v5.0.1, no sanity
	// check is necessary.
	//
	// When upgrading from Gitea >= v1.20 the sanity checks will always
	// be done. They are necessary for Gitea [v1.20.0..v1.20.2] but not
	// for [v1.20.3..], and there is no way to know which point release
	// was running prior to the upgrade. This may require the Gitea
	// admin to update their app.ini even when it is not strictly
	// necessary, but it has no other consequence.
	//
	previousServerVersion, err := semver.GetVersionWithEngine(e)
	if err != nil {
		return err
	}
	upper, err := version.NewVersion("v5.0.1")
	if err != nil {
		return err
	}

	if previousServerVersion.GreaterThan(upper) {
		return nil
	}

	//
	// Sanity checks
	//

	originalCfg, err := cfg.PrepareSaving()
	if err != nil {
		return err
	}

	messages := make([]string, 0, 10)
	for _, c := range v1TOv5_0_1IncludedStorageSections {
		section, _ := originalCfg.GetSection(c.section)
		if section == nil {
			continue
		}
		storageSection, _ := originalCfg.GetSection(c.storageSection)
		if storageSection == nil {
			continue
		}
		messages = append(messages, fmt.Sprintf("[%s] and [%s] may conflict with each other", c.section, c.storageSection))
	}

	if originalCfg.Section("storage").HasKey("PATH") {
		messages = append(messages, "[storage].PATH is set and may create storage issues")
	}

	if len(messages) > 0 {
		return fatal(fmt.Errorf("%s\nThese issues need to be manually fixed in the app.ini file at %s. Please read https://forgejo.org/2023-08-release-v1-20-3-0/ for instructions", strings.Join(messages, "\n"), cfg.GetFile()))
	}
	return nil
}
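For illustration only, here is a minimal, self-contained sketch of the same conflict-detection idea. It is not Forgejo code: it reads an app.ini fragment directly with gopkg.in/ini.v1 instead of setting.ConfigProvider, and the section pairs and paths are made up for the example.

// Sketch: detect conflicting storage-related sections in an ini fragment.
package main

import (
	"fmt"

	"gopkg.in/ini.v1"
)

func main() {
	// Hypothetical app.ini fragment where both [lfs] and [storage.lfs] are set.
	cfg, err := ini.Load([]byte("[lfs]\nPATH = /old/lfs\n\n[storage.lfs]\nPATH = /new/lfs\n"))
	if err != nil {
		panic(err)
	}
	// A couple of the pairs checked by the upgrade sanity code above.
	for _, pair := range [][2]string{{"lfs", "storage.lfs"}, {"attachment", "storage.attachments"}} {
		if cfg.HasSection(pair[0]) && cfg.HasSection(pair[1]) {
			fmt.Printf("[%s] and [%s] may conflict with each other\n", pair[0], pair[1])
		}
	}
}

Running the sketch prints the same kind of message that the sanity check collects for each conflicting pair before refusing to proceed.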
115
services/forgejo/sanity_v1TOv5_0_1Included_test.go
Normal file
@ -0,0 +1,115 @@
// SPDX-License-Identifier: MIT

package forgejo

import (
	"fmt"
	"testing"

	"code.gitea.io/gitea/models/db"
	"code.gitea.io/gitea/models/forgejo/semver"
	"code.gitea.io/gitea/models/unittest"
	"code.gitea.io/gitea/modules/log"

	"github.com/stretchr/testify/assert"
)

func TestForgejo_v1TOv5_0_1Included(t *testing.T) {
	assert.NoError(t, unittest.PrepareTestDatabase())

	logFatal = func(string, ...any) {}
	defer func() {
		logFatal = log.Fatal
	}()

	configWithSoragePath := `
[storage]
PATH = /something
`
	verifyForgejoV1TOv5_0_1Included(t, configWithSoragePath, "[storage].PATH is set")

	for _, c := range v1TOv5_0_1IncludedStorageSections {
		config := fmt.Sprintf("[%s]\n[%s]\n", c.section, c.storageSection)
		verifyForgejoV1TOv5_0_1Included(t, config, fmt.Sprintf("[%s] and [%s]", c.section, c.storageSection))
	}
}

func verifyForgejoV1TOv5_0_1Included(t *testing.T, config, message string) {
	ctx := db.DefaultContext
	e := db.GetEngine(ctx)

	for _, testCase := range []struct {
		name      string
		dbVersion int64
		semver    string
		config    string
	}{
		{
			name:      "5.0.0 with no " + message,
			dbVersion: ForgejoV5DatabaseVersion,
			semver:    "5.0.0+0-gitea-1.20.1",
			config:    "",
		},
		{
			name:      "5.0.1 with no " + message,
			dbVersion: ForgejoV5DatabaseVersion,
			semver:    "5.0.1+0-gitea-1.20.2",
			config:    "",
		},
		{
			name:      "5.0.2 with " + message,
			dbVersion: ForgejoV5DatabaseVersion,
			semver:    "5.0.2+0-gitea-1.20.3",
			config:    config,
		},
		{
			name:      "6.0.0 with " + message,
			dbVersion: ForgejoV6DatabaseVersion,
			semver:    "6.0.0+0-gitea-1.21.0",
			config:    config,
		},
	} {
		cfg := configFixture(t, testCase.config)
		semver.SetVersionString(ctx, testCase.semver)
		assert.NoError(t, v1TOv5_0_1Included(e, testCase.dbVersion, cfg))
	}

	for _, testCase := range []struct {
		name      string
		dbVersion int64
		semver    string
		config    string
	}{
		{
			name:      "5.0.0 with " + message,
			dbVersion: ForgejoV5DatabaseVersion,
			semver:    "5.0.0+0-gitea-1.20.1",
			config:    config,
		},
		{
			name:      "5.0.1 with " + message,
			dbVersion: ForgejoV5DatabaseVersion,
			semver:    "5.0.1+0-gitea-1.20.2",
			config:    config,
		},
		{
			//
			// When upgrading from
			//
			// Forgejo >= 5.0.1+0-gitea-1.20.2
			// Gitea > v1.21
			//
			// the version that the server was running prior to the upgrade
			// is not available.
			//
			name:      semver.DefaultVersionString + " with " + message,
			dbVersion: ForgejoV4DatabaseVersion,
			semver:    semver.DefaultVersionString,
			config:    config,
		},
	} {
		cfg := configFixture(t, testCase.config)
		semver.SetVersionString(ctx, testCase.semver)
		assert.ErrorContains(t, v1TOv5_0_1Included(e, testCase.dbVersion, cfg), message)
	}
}
@ -352,6 +352,7 @@ func (f *NewDingtalkHookForm) Validate(req *http.Request, errs binding.Errors) b
 type NewTelegramHookForm struct {
 	BotToken string `binding:"Required"`
 	ChatID   string `binding:"Required"`
+	ThreadID string
 	WebhookForm
 }
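The new optional ThreadID field presumably corresponds to Telegram's message_thread_id parameter for forum topics, although this diff does not show how Forgejo wires it into the outgoing payload. As a rough sketch only, with a made-up struct name and example values (the Bot API expects an integer thread id; a string is used here simply to mirror the form field):

// Hypothetical illustration of a Telegram sendMessage payload carrying a thread id.
package main

import (
	"encoding/json"
	"fmt"
)

type telegramSendMessage struct {
	ChatID          string `json:"chat_id"`
	MessageThreadID string `json:"message_thread_id,omitempty"` // optional forum-topic thread
	Text            string `json:"text"`
}

func main() {
	payload, _ := json.Marshal(telegramSendMessage{
		ChatID:          "-1001234567890", // assumed example chat id
		MessageThreadID: "42",             // the new, optional thread id
		Text:            "New issue opened",
	})
	fmt.Println(string(payload))
}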
@ -427,6 +427,23 @@ func (diffFile *DiffFile) ShouldBeHidden() bool {
 	return diffFile.IsGenerated || diffFile.IsViewed
 }
 
+func (diffFile *DiffFile) ModeTranslationKey(mode string) string {
+	switch mode {
+	case "040000":
+		return "git.filemode.directory"
+	case "100644":
+		return "git.filemode.normal_file"
+	case "100755":
+		return "git.filemode.executable_file"
+	case "120000":
+		return "git.filemode.symbolic_link"
+	case "160000":
+		return "git.filemode.submodule"
+	default:
+		return mode
+	}
+}
+
 func getCommitFileLineCount(commit *git.Commit, filePath string) int {
 	blob, err := commit.GetBlobByPath(filePath)
 	if err != nil {
@ -247,7 +247,7 @@ func deleteIssue(ctx context.Context, issue *issues_model.Issue) error {
 			issue.MilestoneID, err)
 	}
 
-	if err := activities_model.DeleteIssueActions(ctx, issue.RepoID, issue.ID); err != nil {
+	if err := activities_model.DeleteIssueActions(ctx, issue.RepoID, issue.ID, issue.Index); err != nil {
 		return err
 	}
 
@ -212,7 +212,7 @@ func buildPackagesIndices(ctx context.Context, ownerID int64, repoVersion *packa
 		}
 		addSeparator = true
 
-		fmt.Fprint(w, pfd.Properties.GetByName(debian_module.PropertyControl))
+		fmt.Fprintf(w, "%s\n", strings.TrimSpace(pfd.Properties.GetByName(debian_module.PropertyControl)))
 
 		fmt.Fprintf(w, "Filename: pool/%s/%s/%s\n", distribution, component, pfd.File.Name)
 		fmt.Fprintf(w, "Size: %d\n", pfd.Blob.Size)
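The point of trimming and re-terminating the control block: whether or not the stored control text ends in a newline or a blank line, the emitted stanza must be followed by exactly one newline so that the Filename and Size fields continue the same stanza in the Packages index. A standalone sketch with made-up control data, not Forgejo code:

// Sketch: normalizing trailing whitespace on a Debian control block.
package main

import (
	"fmt"
	"os"
	"strings"
)

func main() {
	for _, control := range []string{
		"Package: demo\nVersion: 1.0\n\n", // stored with a trailing blank line
		"Package: demo\nVersion: 1.0",     // stored without a trailing newline
	} {
		// Old behavior: fmt.Fprint(w, control) copies whatever whitespace was stored.
		// New behavior: trim, then terminate with exactly one newline.
		fmt.Fprintf(os.Stdout, "%s\n", strings.TrimSpace(control))
		fmt.Fprintf(os.Stdout, "Filename: pool/bookworm/main/demo.deb\n")
	}
}

Both inputs now produce an identical, well-formed stanza; with the old Fprint the first one would have inserted a stray blank line before Filename and the second would have glued Filename onto the Version line.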
@ -143,7 +143,7 @@ func GetPullRequestCommitStatusState(ctx context.Context, pr *issues_model.PullR
 		return "", errors.Wrap(err, "LoadBaseRepo")
 	}
 
-	commitStatuses, _, err := git_model.GetLatestCommitStatus(ctx, pr.BaseRepo.ID, sha, db.ListOptions{})
+	commitStatuses, _, err := git_model.GetLatestCommitStatus(ctx, pr.BaseRepo.ID, sha, db.ListOptions{ListAll: true})
 	if err != nil {
 		return "", errors.Wrap(err, "GetLatestCommitStatus")
 	}
@ -799,7 +799,7 @@ func getAllCommitStatus(gitRepo *git.Repository, pr *issues_model.PullRequest) (
 		return nil, nil, shaErr
 	}
 
-	statuses, _, err = git_model.GetLatestCommitStatus(db.DefaultContext, pr.BaseRepo.ID, sha, db.ListOptions{})
+	statuses, _, err = git_model.GetLatestCommitStatus(db.DefaultContext, pr.BaseRepo.ID, sha, db.ListOptions{ListAll: true})
 	lastStatus = git_model.CalcCommitStatus(statuses)
 	return statuses, lastStatus, err
 }
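Both of these hunks add ListAll: true when fetching the latest commit statuses. As a generic illustration of the pitfall being addressed (this is not the Gitea db package; the types and the default page size of 30 are assumptions for the example): with default paging only the first page of statuses is returned, so status contexts beyond that page could be silently ignored when computing the combined state.

// Sketch: default paging truncates a listing; a list-all flag does not.
package main

import "fmt"

type listOptions struct {
	Page     int
	PageSize int
	ListAll  bool
}

func list(statuses []string, opts listOptions) []string {
	if opts.ListAll {
		return statuses // return everything, ignoring pagination
	}
	if opts.PageSize == 0 {
		opts.PageSize = 30 // assumed default page size
	}
	start := 0
	if opts.Page > 1 {
		start = (opts.Page - 1) * opts.PageSize
	}
	end := start + opts.PageSize
	if end > len(statuses) {
		end = len(statuses)
	}
	if start > end {
		start = end
	}
	return statuses[start:end]
}

func main() {
	statuses := make([]string, 45)
	for i := range statuses {
		statuses[i] = fmt.Sprintf("ci/job-%d", i)
	}
	fmt.Println(len(list(statuses, listOptions{})))              // 30: later statuses dropped
	fmt.Println(len(list(statuses, listOptions{ListAll: true}))) // 45: everything considered
}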
Some files were not shown because too many files have changed in this diff.