Remove legacy unknwon/com package (#19298)
Follows: #19284

* The `CopyDir` is only used inside test code.
* Rewrite `ToSnakeCase` with more test cases.
* The `RedisCacher` only puts strings into the cache, so the internal `toStr` replaces the legacy `ToStr` here (a sketch follows below).
* The `UniqueQueue` can use a string as the ID directly; there is no need to call `ToStr`.
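The commit message only names the helpers involved. For context, here is a minimal, hypothetical sketch of what an internal `toStr` replacement and a `ToSnakeCase` rewrite can look like in Go. Neither is taken from the repository: the signatures, the type switch, and the (naive) acronym handling are assumptions for illustration only.

```go
package util

import (
	"fmt"
	"strings"
	"unicode"
)

// toStr renders common value types as a string so callers no longer need the
// legacy com.ToStr. Hypothetical sketch; the real helper may cover different types.
func toStr(v interface{}) string {
	switch v := v.(type) {
	case string:
		return v
	case []byte:
		return string(v)
	case fmt.Stringer:
		return v.String()
	default:
		return fmt.Sprint(v)
	}
}

// ToSnakeCase converts "RepoName" to "repo_name". Naive sketch: it inserts an
// underscore before every interior uppercase rune, so acronym runs such as
// "HTTPServer" are not handled the way a production implementation would be.
func ToSnakeCase(s string) string {
	var b strings.Builder
	for i, r := range s {
		if unicode.IsUpper(r) {
			if i > 0 {
				b.WriteByte('_')
			}
			b.WriteRune(unicode.ToLower(r))
		} else {
			b.WriteRune(r)
		}
	}
	return b.String()
}
```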
parent 4c5cb1e2f2
commit 4f27c28947
17 changed files with 353 additions and 58 deletions
@@ -7,12 +7,38 @@ package util

import (
	"crypto/aes"
	"crypto/rand"
	"fmt"
	"os"
	"testing"
	"time"

	"github.com/stretchr/testify/assert"
	"github.com/unknwon/com" //nolint:depguard
)

func TestCopyFile(t *testing.T) {
	testContent := []byte("hello")

	tmpDir := os.TempDir()
	now := time.Now()
	srcFile := fmt.Sprintf("%s/copy-test-%d-src.txt", tmpDir, now.UnixMicro())
	dstFile := fmt.Sprintf("%s/copy-test-%d-dst.txt", tmpDir, now.UnixMicro())

	_ = os.Remove(srcFile)
	_ = os.Remove(dstFile)
	defer func() {
		_ = os.Remove(srcFile)
		_ = os.Remove(dstFile)
	}()

	err := os.WriteFile(srcFile, testContent, 0o777)
	assert.NoError(t, err)
	err = CopyFile(srcFile, dstFile)
	assert.NoError(t, err)
	dstContent, err := os.ReadFile(dstFile)
	assert.NoError(t, err)
	assert.Equal(t, testContent, dstContent)
}

func TestAESGCM(t *testing.T) {
	t.Parallel()
@ -29,9 +55,4 @@ func TestAESGCM(t *testing.T) {
|
|||
assert.NoError(t, err)
|
||||
|
||||
assert.Equal(t, plaintext, decrypted)
|
||||
|
||||
// at the moment, we make sure the result is the same as the legacy package, this assertion can be removed in next round refactoring
|
||||
legacy, err := com.AESGCMDecrypt(key, ciphertext)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, legacy, plaintext)
|
||||
}
|
||||
|
|
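`TestAESGCM` round-trips a plaintext through the package's AES-GCM helpers and, in the lines above, still compares the result with the legacy `com.AESGCMDecrypt`. The helpers themselves are outside this excerpt; the following is a conventional AES-GCM encrypt/decrypt pair in Go with the shape the test implies (byte-slice key and ciphertext, random nonce prepended to the ciphertext). Treat the names and exact behaviour as assumptions rather than the repository's code.

```go
package util

import (
	"crypto/aes"
	"crypto/cipher"
	"crypto/rand"
	"errors"
)

// AESGCMEncrypt encrypts plaintext with AES-GCM and prepends the random nonce
// to the returned ciphertext. Sketch only, to illustrate the scheme the test
// round-trips.
func AESGCMEncrypt(key, plaintext []byte) ([]byte, error) {
	block, err := aes.NewCipher(key)
	if err != nil {
		return nil, err
	}
	gcm, err := cipher.NewGCM(block)
	if err != nil {
		return nil, err
	}
	nonce := make([]byte, gcm.NonceSize())
	if _, err := rand.Read(nonce); err != nil {
		return nil, err
	}
	return gcm.Seal(nonce, nonce, plaintext, nil), nil
}

// AESGCMDecrypt reverses AESGCMEncrypt, expecting the nonce at the start of
// the ciphertext.
func AESGCMDecrypt(key, ciphertext []byte) ([]byte, error) {
	block, err := aes.NewCipher(key)
	if err != nil {
		return nil, err
	}
	gcm, err := cipher.NewGCM(block)
	if err != nil {
		return nil, err
	}
	if len(ciphertext) < gcm.NonceSize() {
		return nil, errors.New("ciphertext too short")
	}
	nonce, data := ciphertext[:gcm.NonceSize()], ciphertext[gcm.NonceSize():]
	return gcm.Open(nil, nonce, data, nil)
}
```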