🎉 alpha version

Author: loveuer
Date: 2024-12-30 15:09:02 +08:00
Commit: 25b36157c7
54 changed files with 5910 additions and 0 deletions

pkg/api/api.go

@@ -0,0 +1,97 @@
package api
import "sync"
const (
_404 = `404 Not Found`
_405 = `405 Method Not Allowed`
_500 = `500 Internal Server Error`
TraceKey = "X-Trace-Id"
)
type Map map[string]interface{}
type Config struct {
DisableMessagePrint bool `json:"-"`
// Default: 4 * 1024 * 1024
BodyLimit int64 `json:"-"`
// if report http.ErrServerClosed as run err
ErrServeClose bool `json:"-"`
DisableLogger bool `json:"-"`
DisableRecover bool `json:"-"`
DisableHttpErrorLog bool `json:"-"`
// EnableNotImplementHandler bool `json:"-"`
NotFoundHandler HandlerFunc `json:"-"`
MethodNotAllowedHandler HandlerFunc `json:"-"`
}
var defaultConfig = &Config{
BodyLimit: 4 * 1024 * 1024,
NotFoundHandler: func(c *Ctx) error {
c.Set("Content-Type", MIMETextPlain)
_, err := c.Status(404).Write([]byte(_404))
return err
},
MethodNotAllowedHandler: func(c *Ctx) error {
c.Set("Content-Type", MIMETextPlain)
_, err := c.Status(405).Write([]byte(_405))
return err
},
}
func New(config ...Config) *App {
app := &App{
RouterGroup: RouterGroup{
Handlers: nil,
basePath: "/",
root: true,
},
pool: &sync.Pool{},
redirectTrailingSlash: true, // true
redirectFixedPath: false, // false
handleMethodNotAllowed: true, // false
useRawPath: false, // false
unescapePathValues: true, // true
removeExtraSlash: false, // false
}
if len(config) > 0 {
app.config = &config[0]
if app.config.BodyLimit == 0 {
app.config.BodyLimit = defaultConfig.BodyLimit
}
if app.config.NotFoundHandler == nil {
app.config.NotFoundHandler = defaultConfig.NotFoundHandler
}
if app.config.MethodNotAllowedHandler == nil {
app.config.MethodNotAllowedHandler = defaultConfig.MethodNotAllowedHandler
}
} else {
app.config = defaultConfig
}
app.RouterGroup.app = app
if !app.config.DisableLogger {
app.Use(NewLogger())
}
if !app.config.DisableRecover {
app.Use(NewRecover(true))
}
app.pool.New = func() any {
return app.allocateContext()
}
return app
}
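
Usage note: a minimal sketch of how the constructor above is meant to be called from application code, assuming the import path github.com/loveuer/upp/pkg/api implied by the other files in this commit; the route, port, and response values are illustrative.

package main

import "github.com/loveuer/upp/pkg/api"

func main() {
    // BodyLimit is left zero here, so New falls back to the 4 MiB default
    // from defaultConfig; the custom NotFoundHandler replaces the built-in
    // plain-text 404 while MethodNotAllowedHandler keeps its default.
    app := api.New(api.Config{
        NotFoundHandler: func(c *api.Ctx) error {
            return c.Status(404).JSON(api.Map{"msg": "no such route"})
        },
    })

    app.GET("/ping", func(c *api.Ctx) error {
        return c.SendString("pong")
    })

    // Run blocks until the listener is closed.
    if err := app.Run(":8080"); err != nil {
        panic(err)
    }
}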

pkg/api/app.go

@@ -0,0 +1,300 @@
package api
import (
"context"
"crypto/tls"
"errors"
"io"
"log"
"net"
"net/http"
"path"
"regexp"
"sync"
"github.com/loveuer/upp/internal/bytesconv"
"github.com/loveuer/upp/pkg/interfaces"
)
var (
_ IRouter = (*App)(nil)
regSafePrefix = regexp.MustCompile("[^a-zA-Z0-9/-]+")
regRemoveRepeatedChar = regexp.MustCompile("/{2,}")
)
type App struct {
RouterGroup
Upp interfaces.Upp
config *Config
groups []*RouterGroup
server *http.Server
trees methodTrees
pool *sync.Pool
maxParams uint16
maxSections uint16
redirectTrailingSlash bool // true
redirectFixedPath bool // false
handleMethodNotAllowed bool // false
useRawPath bool // false
unescapePathValues bool // true
removeExtraSlash bool // false
}
func (a *App) allocateContext() *Ctx {
var (
skippedNodes = make([]skippedNode, 0, a.maxSections)
v = make(Params, 0, a.maxParams)
)
ctx := Ctx{
lock: sync.Mutex{},
app: a,
index: -1,
locals: make(map[string]any),
handlers: make([]HandlerFunc, 0),
skippedNodes: &skippedNodes,
params: &v,
}
return &ctx
}
func (a *App) ServeHTTP(writer http.ResponseWriter, request *http.Request) {
var (
err error
c = a.pool.Get().(*Ctx)
nfe = new(Err)
)
c.reset(writer, request)
if err = c.verify(); err != nil {
if errors.As(err, nfe) {
_ = c.Status(nfe.Status).SendString(nfe.Msg)
return
}
_ = c.Status(500).SendString(err.Error())
return
}
a.handleHTTPRequest(c)
a.pool.Put(c)
}
func (a *App) run(ln net.Listener) error {
srv := &http.Server{Handler: a}
if a.config.DisableHttpErrorLog {
srv.ErrorLog = log.New(io.Discard, "", 0)
}
a.server = srv
err := a.server.Serve(ln)
if !errors.Is(err, http.ErrServerClosed) || a.config.ErrServeClose {
return err
}
return nil
}
func (a *App) Run(address string) error {
ln, err := net.Listen("tcp", address)
if err != nil {
return err
}
return a.run(ln)
}
func (a *App) RunTLS(address string, tlsConfig *tls.Config) error {
ln, err := tls.Listen("tcp", address, tlsConfig)
if err != nil {
return err
}
return a.run(ln)
}
func (a *App) RunListener(ln net.Listener) error {
a.server = &http.Server{Addr: ln.Addr().String()}
return a.run(ln)
}
func (a *App) Shutdown(ctx context.Context) error {
return a.server.Shutdown(ctx)
}
func (a *App) addRoute(method, path string, handlers ...HandlerFunc) {
elsePanic(path[0] == '/', "path must begin with '/'")
elsePanic(method != "", "HTTP method cannot be empty")
elsePanic(len(handlers) > 0, "there must be at least one handler")
// if !a.config.DisableMessagePrint {
// fmt.Printf("[NF] Add Route: %-8s - %-25s (%2d handlers)\n", method, path, len(handlers))
// }
root := a.trees.get(method)
if root == nil {
root = new(node)
root.fullPath = "/"
a.trees = append(a.trees, methodTree{method: method, root: root})
}
root.addRoute(path, handlers...)
if paramsCount := countParams(path); paramsCount > a.maxParams {
a.maxParams = paramsCount
}
if sectionsCount := countSections(path); sectionsCount > a.maxSections {
a.maxSections = sectionsCount
}
}
func (a *App) handleHTTPRequest(c *Ctx) {
var err error
httpMethod := c.Request.Method
rPath := c.Request.URL.Path
unescape := false
if a.useRawPath && len(c.Request.URL.RawPath) > 0 {
rPath = c.Request.URL.RawPath
unescape = a.unescapePathValues
}
if a.removeExtraSlash {
rPath = cleanPath(rPath)
}
// Find root of the tree for the given HTTP method
t := a.trees
for i, tl := 0, len(t); i < tl; i++ {
if t[i].method != httpMethod {
continue
}
root := t[i].root
// Find route in tree
value := root.getValue(rPath, c.params, c.skippedNodes, unescape)
if value.params != nil {
c.params = value.params
}
if value.handlers != nil {
c.handlers = value.handlers
c.fullPath = value.fullPath
if err = c.Next(); err != nil {
serveError(c, errorHandler)
}
return
}
if httpMethod != http.MethodConnect && rPath != "/" {
if value.tsr && a.redirectTrailingSlash {
redirectTrailingSlash(c)
return
}
if a.redirectFixedPath && redirectFixedPath(c, root, a.redirectFixedPath) {
return
}
}
break
}
if a.handleMethodNotAllowed {
// According to RFC 7231 section 6.5.5, MUST generate an Allow header field in response
// containing a list of the target resource's currently supported methods.
allowed := make([]string, 0, len(t)-1)
for _, tree := range a.trees {
if tree.method == httpMethod {
continue
}
if value := tree.root.getValue(rPath, nil, c.skippedNodes, unescape); value.handlers != nil {
allowed = append(allowed, tree.method)
}
}
if len(allowed) > 0 {
c.handlers = a.combineHandlers(a.config.MethodNotAllowedHandler)
_ = c.Next()
return
}
}
c.handlers = a.combineHandlers(a.config.NotFoundHandler)
_ = c.Next()
return
}
func errorHandler(c *Ctx) error {
return c.Status(500).SendString(_500)
}
func serveError(c *Ctx, handler HandlerFunc) {
err := c.Next()
if c.writermem.Written() {
return
}
_ = handler(c)
_ = err
}
func redirectTrailingSlash(c *Ctx) {
req := c.Request
p := req.URL.Path
if prefix := path.Clean(c.Request.Header.Get("X-Forwarded-Prefix")); prefix != "." {
prefix = regSafePrefix.ReplaceAllString(prefix, "")
prefix = regRemoveRepeatedChar.ReplaceAllString(prefix, "/")
p = prefix + "/" + req.URL.Path
}
req.URL.Path = p + "/"
if length := len(p); length > 1 && p[length-1] == '/' {
req.URL.Path = p[:length-1]
}
redirectRequest(c)
}
func redirectFixedPath(c *Ctx, root *node, trailingSlash bool) bool {
req := c.Request
rPath := req.URL.Path
if fixedPath, ok := root.findCaseInsensitivePath(cleanPath(rPath), trailingSlash); ok {
req.URL.Path = bytesconv.BytesToString(fixedPath)
redirectRequest(c)
return true
}
return false
}
func redirectRequest(c *Ctx) {
req := c.Request
// rPath := req.URL.Path
rURL := req.URL.String()
code := http.StatusMovedPermanently // Permanent redirect, request with GET method
if req.Method != http.MethodGet {
code = http.StatusTemporaryRedirect
}
// debugPrint("redirecting request %d: %s --> %s", code, rPath, rURL)
http.Redirect(c.Writer, req, rURL, code)
c.writermem.WriteHeaderNow()
}
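
Usage note: a sketch of pairing Run with Shutdown above for a signal-driven graceful stop; the port, timeout, and signal set are illustrative, and with the default config a clean shutdown comes back from Run as a nil error because run() swallows http.ErrServerClosed.

package main

import (
    "context"
    "os/signal"
    "syscall"
    "time"

    "github.com/loveuer/upp/pkg/api"
)

func main() {
    app := api.New()
    app.GET("/health", func(c *api.Ctx) error { return c.SendStatus(200) })

    // Serve in the background so the main goroutine can wait for a signal.
    errCh := make(chan error, 1)
    go func() { errCh <- app.Run(":8080") }()

    // Wait for Ctrl+C or SIGTERM, then give in-flight requests 5 seconds.
    ctx, stop := signal.NotifyContext(context.Background(), syscall.SIGINT, syscall.SIGTERM)
    defer stop()
    <-ctx.Done()

    shutdownCtx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
    defer cancel()
    if err := app.Shutdown(shutdownCtx); err != nil {
        panic(err)
    }
    <-errCh
}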

pkg/api/ctx.go

@@ -0,0 +1,384 @@
package api
import (
"bytes"
"context"
"encoding/json"
"errors"
"fmt"
"html/template"
"io"
"mime/multipart"
"net"
"net/http"
"strings"
"sync"
"github.com/elastic/go-elasticsearch/v7"
"github.com/google/uuid"
"github.com/loveuer/upp/internal/sse"
"github.com/loveuer/upp/pkg/cache"
"github.com/loveuer/upp/pkg/interfaces"
"gorm.io/gorm"
)
var forwardHeaders = []string{"CF-Connecting-IP", "X-Forwarded-For", "X-Real-Ip"}
type Ctx struct {
lock sync.Mutex
writermem responseWriter
Writer ResponseWriter
Request *http.Request
path string
method string
StatusCode int
app *App
params *Params
index int
handlers []HandlerFunc
locals map[string]interface{}
skippedNodes *[]skippedNode
fullPath string
}
func (c *Ctx) UseLogger() interfaces.Logger {
return c.app.Upp.UseLogger(c.Context())
}
func (c *Ctx) UseDB(ctx ...context.Context) *gorm.DB {
return c.app.Upp.UseDB(ctx...)
}
func (c *Ctx) UseCache() cache.Cache {
return c.app.Upp.UseCache()
}
func (c *Ctx) UseES() elasticsearch.Client {
return *c.app.Upp.UseES()
}
func (c *Ctx) reset(w http.ResponseWriter, r *http.Request) {
traceId := r.Header.Get(TraceKey)
if traceId == "" {
traceId = uuid.Must(uuid.NewV7()).String()
}
c.writermem.reset(w)
c.Request = r.WithContext(context.WithValue(r.Context(), TraceKey, traceId))
c.Writer = &c.writermem
c.handlers = nil
c.index = -1
c.path = r.URL.Path
c.method = r.Method
c.StatusCode = 200
c.fullPath = ""
*c.params = (*c.params)[:0]
*c.skippedNodes = (*c.skippedNodes)[:0]
for key := range c.locals {
delete(c.locals, key)
}
c.writermem.Header().Set(TraceKey, traceId)
}
func (c *Ctx) Locals(key string, value ...interface{}) interface{} {
data := c.locals[key]
if len(value) > 0 {
c.locals[key] = value[0]
}
return data
}
func (c *Ctx) Method(overWrite ...string) string {
method := c.Request.Method
if len(overWrite) > 0 && overWrite[0] != "" {
c.Request.Method = overWrite[0]
}
return method
}
func (c *Ctx) Path(overWrite ...string) string {
path := c.Request.URL.Path
if len(overWrite) > 0 && overWrite[0] != "" {
c.Request.URL.Path = overWrite[0]
}
return path
}
func (c *Ctx) Cookies(key string, defaultValue ...string) string {
dv := ""
if len(defaultValue) > 0 {
dv = defaultValue[0]
}
cookie, err := c.Request.Cookie(key)
if err != nil || cookie.Value == "" {
return dv
}
return cookie.Value
}
func (c *Ctx) Context() context.Context {
return c.Request.Context()
}
func (c *Ctx) Next() error {
c.index++
if c.index >= len(c.handlers) {
return nil
}
var (
err error
handler = c.handlers[c.index]
)
if handler != nil {
if err = handler(c); err != nil {
return err
}
}
c.index++
return nil
}
/* ===============================================================
|| Handle Ctx Request Part
=============================================================== */
func (c *Ctx) verify() error {
// validate body size against the configured limit
if c.app.config.BodyLimit != -1 && c.Request.ContentLength > c.app.config.BodyLimit {
return NewNFError(413, "Content Too Large")
}
return nil
}
func (c *Ctx) Param(key string) string {
return c.params.ByName(key)
}
func (c *Ctx) SetParam(key, value string) {
c.lock.Lock()
defer c.lock.Unlock()
params := append(*c.params, Param{Key: key, Value: value})
c.params = &params
}
func (c *Ctx) Form(key string) string {
return c.Request.FormValue(key)
}
// FormValue is an alias of Form, kept for fiber-style Ctx compatibility
func (c *Ctx) FormValue(key string) string {
return c.Request.FormValue(key)
}
func (c *Ctx) FormFile(key string) (*multipart.FileHeader, error) {
_, fh, err := c.Request.FormFile(key)
return fh, err
}
func (c *Ctx) MultipartForm() (*multipart.Form, error) {
if err := c.Request.ParseMultipartForm(c.app.config.BodyLimit); err != nil {
return nil, err
}
return c.Request.MultipartForm, nil
}
func (c *Ctx) Query(key string) string {
return c.Request.URL.Query().Get(key)
}
func (c *Ctx) Get(key string, defaultValue ...string) string {
value := c.Request.Header.Get(key)
if value == "" && len(defaultValue) > 0 {
return defaultValue[0]
}
return value
}
func (c *Ctx) IP(useProxyHeader ...bool) string {
ip, _, err := net.SplitHostPort(strings.TrimSpace(c.Request.RemoteAddr))
if err != nil {
return ""
}
if len(useProxyHeader) > 0 && useProxyHeader[0] {
for _, h := range forwardHeaders {
for _, rip := range strings.Split(c.Request.Header.Get(h), ",") {
if realIP := net.ParseIP(strings.Replace(rip, " ", "", -1)); realIP != nil {
ip = realIP.String()
break
}
}
}
}
return ip
}
func (c *Ctx) BodyParser(out interface{}) error {
var (
err error
ctype = strings.ToLower(c.Request.Header.Get("Content-Type"))
)
ctype = parseVendorSpecificContentType(ctype)
ctypeEnd := strings.IndexByte(ctype, ';')
if ctypeEnd != -1 {
ctype = ctype[:ctypeEnd]
}
if strings.HasSuffix(ctype, "json") {
bs, err := io.ReadAll(c.Request.Body)
if err != nil {
return err
}
_ = c.Request.Body.Close()
c.Request.Body = io.NopCloser(bytes.NewReader(bs))
return json.Unmarshal(bs, out)
}
if strings.HasPrefix(ctype, MIMEApplicationForm) {
if err = c.Request.ParseForm(); err != nil {
return NewNFError(400, err.Error())
}
return parseToStruct("form", out, c.Request.Form)
}
if strings.HasPrefix(ctype, MIMEMultipartForm) {
if err = c.Request.ParseMultipartForm(c.app.config.BodyLimit); err != nil {
return NewNFError(400, err.Error())
}
return parseToStruct("form", out, c.Request.PostForm)
}
return NewNFError(422, "Unprocessable Content")
}
func (c *Ctx) QueryParser(out interface{}) error {
return parseToStruct("query", out, c.Request.URL.Query())
}
/* ===============================================================
|| Handle Ctx Response Part
=============================================================== */
func (c *Ctx) Status(code int) *Ctx {
c.lock.Lock()
defer c.lock.Unlock()
c.Writer.WriteHeader(code)
c.StatusCode = c.writermem.status
return c
}
// Set set response header
func (c *Ctx) Set(key string, value string) {
c.Writer.Header().Set(key, value)
}
// AddHeader add response header
func (c *Ctx) AddHeader(key string, value string) {
c.Writer.Header().Add(key, value)
}
// SetHeader set response header
func (c *Ctx) SetHeader(key string, value string) {
c.Writer.Header().Set(key, value)
}
func (c *Ctx) SendStatus(code int) error {
c.Status(code)
c.Writer.WriteHeaderNow()
return nil
}
func (c *Ctx) SendString(data string) error {
c.SetHeader("Content-Type", "text/plain")
_, err := c.Write([]byte(data))
return err
}
func (c *Ctx) Writef(format string, values ...interface{}) (int, error) {
c.SetHeader("Content-Type", "text/plain")
return c.Write([]byte(fmt.Sprintf(format, values...)))
}
func (c *Ctx) JSON(data interface{}) error {
c.SetHeader("Content-Type", MIMEApplicationJSON)
encoder := json.NewEncoder(c.Writer)
if err := encoder.Encode(data); err != nil {
return err
}
return nil
}
func (c *Ctx) SSEvent(event string, data interface{}) error {
c.Set("Content-Type", "text/event-stream")
c.Set("Cache-Control", "no-cache")
c.Set("Transfer-Encoding", "chunked")
return sse.Encode(c.Writer, sse.Event{Event: event, Data: data})
}
func (c *Ctx) Flush() error {
if f, ok := c.Writer.(http.Flusher); ok {
f.Flush()
return nil
}
return errors.New("http.Flusher is not implemented")
}
func (c *Ctx) HTML(html string) error {
c.SetHeader("Content-Type", "text/html")
_, err := c.Write([]byte(html))
return err
}
func (c *Ctx) RenderHTML(name, html string, obj any) error {
c.SetHeader("Content-Type", "text/html")
t, err := template.New(name).Parse(html)
if err != nil {
return err
}
return t.Execute(c.Writer, obj)
}
func (c *Ctx) Redirect(url string, code int) error {
http.Redirect(c.Writer, c.Request, url, code)
return nil
}
func (c *Ctx) Write(data []byte) (int, error) {
return c.Writer.Write(data)
}
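
Usage note: a sketch of request binding with BodyParser and QueryParser above; the struct tags mirror what the code actually reads (json tags for JSON bodies, form tags for url-encoded and multipart bodies, query tags for QueryParser), while the route and field names are made up for illustration.

package main

import "github.com/loveuer/upp/pkg/api"

type createUserReq struct {
    Name string `json:"name" form:"name"`
    Age  int    `json:"age" form:"age"`
}

type listUserQuery struct {
    Page int    `query:"page"`
    Kw   string `query:"kw"`
}

func main() {
    app := api.New()

    app.POST("/users", func(c *api.Ctx) error {
        var req createUserReq
        // BodyParser picks json.Unmarshal or the schema decoder based on
        // the request Content-Type.
        if err := c.BodyParser(&req); err != nil {
            return c.Status(400).JSON(api.Map{"err": err.Error()})
        }
        return c.Status(201).JSON(req)
    })

    app.GET("/users", func(c *api.Ctx) error {
        var q listUserQuery
        if err := c.QueryParser(&q); err != nil {
            return c.Status(400).JSON(api.Map{"err": err.Error()})
        }
        return c.JSON(q)
    })

    _ = app.Run(":8080")
}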

pkg/api/error.go

@@ -0,0 +1,16 @@
package api
import "strconv"
type Err struct {
Status int
Msg string
}
func (n Err) Error() string {
return strconv.Itoa(n.Status) + " " + n.Msg
}
func NewNFError(status int, msg string) Err {
return Err{Status: status, Msg: msg}
}
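
Usage note: a small sketch of how an Err value carries its own status, mirroring the errors.As check in App.ServeHTTP; the values are illustrative.

package main

import (
    "errors"
    "fmt"

    "github.com/loveuer/upp/pkg/api"
)

func main() {
    var err error = api.NewNFError(413, "Content Too Large")

    // Same pattern as App.ServeHTTP: unwrap the Err to recover its status.
    target := new(api.Err)
    if errors.As(err, target) {
        fmt.Println(target.Status, target.Msg) // 413 Content Too Large
    }
}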

pkg/api/handler.go

@@ -0,0 +1,9 @@
package api
import "fmt"
type HandlerFunc func(*Ctx) error
func ToDoHandler(c *Ctx) error {
return c.Status(501).SendString(fmt.Sprintf("%s - %s Not Implemented", c.Method(), c.Path()))
}

pkg/api/middleware.go

@@ -0,0 +1,67 @@
package api
import (
"fmt"
"os"
"runtime/debug"
"strconv"
"time"
"github.com/loveuer/nf"
"github.com/loveuer/nf/nft/log"
"github.com/loveuer/nf/nft/resp"
"github.com/loveuer/upp/pkg/tool"
)
func NewRecover(enableStackTrace bool) HandlerFunc {
return func(c *Ctx) error {
defer func() {
if r := recover(); r != nil {
if enableStackTrace {
os.Stderr.WriteString(fmt.Sprintf("recovered from panic: %v\n%s\n", r, debug.Stack()))
} else {
os.Stderr.WriteString(fmt.Sprintf("recovered from panic: %v\n", r))
}
_ = c.Status(500).SendString(fmt.Sprint(r))
}
}()
return c.Next()
}
}
func NewLogger() HandlerFunc {
return func(c *Ctx) error {
var (
now = time.Now()
logFn func(msg string, data ...any)
ip = c.IP()
)
traceId := c.Context().Value(nf.TraceKey)
c.Locals(nf.TraceKey, traceId)
err := c.Next()
c.Writer.Header().Set(nf.TraceKey, fmt.Sprint(traceId))
status, _ := strconv.Atoi(c.Writer.Header().Get(resp.RealStatusHeader))
duration := time.Since(now)
msg := fmt.Sprintf("%s | %15s | %d[%3d] | %s | %6s | %s", traceId, ip, c.StatusCode, status, tool.HumanDuration(duration.Nanoseconds()), c.Method(), c.Path())
switch {
case status >= 500:
logFn = log.Error
case status >= 400:
logFn = log.Warn
default:
logFn = log.Info
}
logFn(msg)
return err
}
}
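
Usage note: a sketch of swapping the built-in middleware above for custom ones; DisableLogger/DisableRecover skip the defaults wired in New, and the hypothetical timing middleware shows the call-Next-then-observe pattern that NewLogger itself uses.

package main

import (
    "fmt"
    "time"

    "github.com/loveuer/upp/pkg/api"
)

// timing measures how long the rest of the chain takes and prints it
// after Next returns; purely illustrative.
func timing(c *api.Ctx) error {
    start := time.Now()
    err := c.Next()
    fmt.Printf("%s %s took %s\n", c.Method(), c.Path(), time.Since(start))
    return err
}

func main() {
    app := api.New(api.Config{
        DisableLogger:  true, // skip NewLogger()
        DisableRecover: true, // skip NewRecover(true)
    })
    app.Use(timing, api.NewRecover(false))

    app.GET("/ping", func(c *api.Ctx) error { return c.SendString("pong") })
    _ = app.Run(":8080")
}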

pkg/api/response_writer.go

@@ -0,0 +1,134 @@
package api
import (
"bufio"
"fmt"
"io"
"net"
"net/http"
)
const (
noWritten = -1
defaultStatus = http.StatusOK
)
// ResponseWriter ...
type ResponseWriter interface {
http.ResponseWriter
http.Hijacker
http.Flusher
http.CloseNotifier
// Status returns the HTTP response status code of the current request.
Status() int
// Size returns the number of bytes already written into the response http body.
// See Written()
Size() int
// WriteString writes the string into the response body.
WriteString(string) (int, error)
// Written returns true if the response body was already written.
Written() bool
// WriteHeaderNow forces to write the http header (status code + headers).
WriteHeaderNow()
// Pusher get the http.Pusher for server push
Pusher() http.Pusher
}
type responseWriter struct {
http.ResponseWriter
written bool
size int
status int
}
var _ ResponseWriter = (*responseWriter)(nil)
func (w *responseWriter) Unwrap() http.ResponseWriter {
return w.ResponseWriter
}
func (w *responseWriter) reset(writer http.ResponseWriter) {
w.ResponseWriter = writer
w.size = noWritten
w.status = defaultStatus
}
func (w *responseWriter) WriteHeader(code int) {
if code > 0 && w.status != code {
if w.Written() {
fmt.Printf("WARNING: Headers were already written. Wanted to override status code %d with %d\n", w.status, code)
return
}
w.status = code
}
}
func (w *responseWriter) WriteHeaderNow() {
if !w.Written() {
w.size = 0
if w.status == 0 {
w.status = 200
}
w.ResponseWriter.WriteHeader(w.status)
}
}
func (w *responseWriter) Write(data []byte) (n int, err error) {
w.WriteHeaderNow()
n, err = w.ResponseWriter.Write(data)
w.size += n
return
}
func (w *responseWriter) WriteString(s string) (n int, err error) {
w.WriteHeaderNow()
n, err = io.WriteString(w.ResponseWriter, s)
w.size += n
return
}
func (w *responseWriter) Status() int {
return w.status
}
func (w *responseWriter) Size() int {
return w.size
}
func (w *responseWriter) Written() bool {
return w.size != noWritten || w.written
}
// Hijack implements the http.Hijacker interface.
func (w *responseWriter) Hijack() (net.Conn, *bufio.ReadWriter, error) {
if w.size < 0 {
w.size = 0
}
return w.ResponseWriter.(http.Hijacker).Hijack()
}
// CloseNotify implements the http.CloseNotifier interface.
func (w *responseWriter) CloseNotify() <-chan bool {
return w.ResponseWriter.(http.CloseNotifier).CloseNotify()
}
// Flush implements the http.Flusher interface.
func (w *responseWriter) Flush() {
w.WriteHeaderNow()
w.ResponseWriter.(http.Flusher).Flush()
}
func (w *responseWriter) Pusher() (pusher http.Pusher) {
if pusher, ok := w.ResponseWriter.(http.Pusher); ok {
return pusher
}
return nil
}
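
Usage note: the Flush passthrough above is what lets Ctx.SSEvent stream server-sent events; a sketch with an illustrative /events route and a fixed three-tick loop.

package main

import (
    "time"

    "github.com/loveuer/upp/pkg/api"
)

func main() {
    app := api.New()

    app.GET("/events", func(c *api.Ctx) error {
        for i := 0; i < 3; i++ {
            // SSEvent encodes one event; Flush pushes it to the client
            // through the response writer's http.Flusher.
            if err := c.SSEvent("tick", api.Map{"n": i}); err != nil {
                return err
            }
            if err := c.Flush(); err != nil {
                return err
            }
            time.Sleep(time.Second)
        }
        return nil
    })

    _ = app.Run(":8080")
}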

pkg/api/routergroup.go

@@ -0,0 +1,155 @@
package api
import (
"math"
"net/http"
"path"
"regexp"
)
var (
// regEnLetter matches english letters for http method name
regEnLetter = regexp.MustCompile("^[A-Z]+$")
// anyMethods for RouterGroup Any method
anyMethods = []string{
http.MethodGet, http.MethodPost, http.MethodPut, http.MethodPatch,
http.MethodHead, http.MethodOptions, http.MethodDelete, http.MethodConnect,
http.MethodTrace,
}
)
// IRouter defines all router handle interface includes single and group router.
type IRouter interface {
IRoutes
Group(string, ...HandlerFunc) *RouterGroup
}
// IRoutes defines all router handle interface.
type IRoutes interface {
Use(...HandlerFunc) IRoutes
Handle(string, string, ...HandlerFunc) IRoutes
Any(string, ...HandlerFunc) IRoutes
GET(string, ...HandlerFunc) IRoutes
POST(string, ...HandlerFunc) IRoutes
DELETE(string, ...HandlerFunc) IRoutes
PATCH(string, ...HandlerFunc) IRoutes
PUT(string, ...HandlerFunc) IRoutes
OPTIONS(string, ...HandlerFunc) IRoutes
HEAD(string, ...HandlerFunc) IRoutes
Match([]string, string, ...HandlerFunc) IRoutes
// StaticFile(string, string) IRoutes
// StaticFileFS(string, string, http.FileSystem) IRoutes
// Static(string, string) IRoutes
// StaticFS(string, http.FileSystem) IRoutes
}
type RouterGroup struct {
Handlers []HandlerFunc
basePath string
app *App
root bool
}
var _ IRouter = (*RouterGroup)(nil)
func (group *RouterGroup) Use(middleware ...HandlerFunc) IRoutes {
group.Handlers = append(group.Handlers, middleware...)
return group.returnObj()
}
func (group *RouterGroup) Group(relativePath string, handlers ...HandlerFunc) *RouterGroup {
return &RouterGroup{
Handlers: group.combineHandlers(handlers...),
basePath: group.calculateAbsolutePath(relativePath),
app: group.app,
}
}
func (group *RouterGroup) BasePath() string {
return group.basePath
}
func (group *RouterGroup) handle(httpMethod, relativePath string, handlers ...HandlerFunc) IRoutes {
absolutePath := group.calculateAbsolutePath(relativePath)
handlers = group.combineHandlers(handlers...)
group.app.addRoute(httpMethod, absolutePath, handlers...)
return group.returnObj()
}
func (group *RouterGroup) Handle(httpMethod, relativePath string, handlers ...HandlerFunc) IRoutes {
if matched := regEnLetter.MatchString(httpMethod); !matched {
panic("http method " + httpMethod + " is not valid")
}
return group.handle(httpMethod, relativePath, handlers...)
}
func (group *RouterGroup) POST(relativePath string, handlers ...HandlerFunc) IRoutes {
return group.handle(http.MethodPost, relativePath, handlers...)
}
func (group *RouterGroup) GET(relativePath string, handlers ...HandlerFunc) IRoutes {
return group.handle(http.MethodGet, relativePath, handlers...)
}
func (group *RouterGroup) DELETE(relativePath string, handlers ...HandlerFunc) IRoutes {
return group.handle(http.MethodDelete, relativePath, handlers...)
}
func (group *RouterGroup) PATCH(relativePath string, handlers ...HandlerFunc) IRoutes {
return group.handle(http.MethodPatch, relativePath, handlers...)
}
func (group *RouterGroup) PUT(relativePath string, handlers ...HandlerFunc) IRoutes {
return group.handle(http.MethodPut, relativePath, handlers...)
}
func (group *RouterGroup) OPTIONS(relativePath string, handlers ...HandlerFunc) IRoutes {
return group.handle(http.MethodOptions, relativePath, handlers...)
}
func (group *RouterGroup) HEAD(relativePath string, handlers ...HandlerFunc) IRoutes {
return group.handle(http.MethodHead, relativePath, handlers...)
}
// Any registers a route that matches all the HTTP methods.
// GET, POST, PUT, PATCH, HEAD, OPTIONS, DELETE, CONNECT, TRACE.
func (group *RouterGroup) Any(relativePath string, handlers ...HandlerFunc) IRoutes {
for _, method := range anyMethods {
group.handle(method, relativePath, handlers...)
}
return group.returnObj()
}
func (group *RouterGroup) Match(methods []string, relativePath string, handlers ...HandlerFunc) IRoutes {
for _, method := range methods {
group.handle(method, relativePath, handlers...)
}
return group.returnObj()
}
const abortIndex int8 = math.MaxInt8 >> 1
func (group *RouterGroup) combineHandlers(handlers ...HandlerFunc) []HandlerFunc {
finalSize := len(group.Handlers) + len(handlers)
elsePanic(finalSize < int(abortIndex), "too many handlers")
mergedHandlers := make([]HandlerFunc, finalSize)
copy(mergedHandlers, group.Handlers)
copy(mergedHandlers[len(group.Handlers):], handlers)
return mergedHandlers
}
func (group *RouterGroup) calculateAbsolutePath(relativePath string) string {
return path.Join(group.basePath, relativePath)
}
func (group *RouterGroup) returnObj() IRoutes {
if group.root {
return group.app
}
return group
}
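
Usage note: a sketch of route grouping with the methods above; the auth middleware, header check, and paths are illustrative.

package main

import "github.com/loveuer/upp/pkg/api"

// auth is a hypothetical gate: it short-circuits the chain unless an
// Authorization header is present.
func auth(c *api.Ctx) error {
    if c.Get("Authorization") == "" {
        return c.Status(401).SendString("unauthorized")
    }
    return c.Next()
}

func main() {
    app := api.New()

    v1 := app.Group("/api/v1", auth) // every v1 route inherits auth
    v1.GET("/users/:id", func(c *api.Ctx) error {
        return c.JSON(api.Map{"id": c.Param("id")})
    })
    v1.Match([]string{"POST", "PUT"}, "/users", api.ToDoHandler)

    _ = app.Run(":8080")
}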

pkg/api/tree.go

@@ -0,0 +1,891 @@
package api
import (
"bytes"
"net/url"
"strings"
"unicode"
"unicode/utf8"
"github.com/loveuer/upp/internal/bytesconv"
)
var (
strColon = []byte(":")
strStar = []byte("*")
strSlash = []byte("/")
)
// Param is a single URL parameter, consisting of a key and a value.
type Param struct {
Key string
Value string
}
// Params is a Param-slice, as returned by the router.
// The slice is ordered, the first URL parameter is also the first slice value.
// It is therefore safe to read values by the index.
type Params []Param
// Get returns the value of the first Param which key matches the given name and a boolean true.
// If no matching Param is found, an empty string is returned and a boolean false .
func (ps Params) Get(name string) (string, bool) {
for _, entry := range ps {
if entry.Key == name {
return entry.Value, true
}
}
return "", false
}
// ByName returns the value of the first Param which key matches the given name.
// If no matching Param is found, an empty string is returned.
func (ps Params) ByName(name string) (va string) {
va, _ = ps.Get(name)
return
}
type methodTree struct {
method string
root *node
}
type methodTrees []methodTree
func (trees methodTrees) get(method string) *node {
for _, tree := range trees {
if tree.method == method {
return tree.root
}
}
return nil
}
func min(a, b int) int {
if a <= b {
return a
}
return b
}
func longestCommonPrefix(a, b string) int {
i := 0
max := min(len(a), len(b))
for i < max && a[i] == b[i] {
i++
}
return i
}
// addChild will add a child node, keeping wildcardChild at the end
func (n *node) addChild(child *node) {
if n.wildChild && len(n.children) > 0 {
wildcardChild := n.children[len(n.children)-1]
n.children = append(n.children[:len(n.children)-1], child, wildcardChild)
} else {
n.children = append(n.children, child)
}
}
func countParams(path string) uint16 {
var n uint16
s := bytesconv.StringToBytes(path)
n += uint16(bytes.Count(s, strColon))
n += uint16(bytes.Count(s, strStar))
return n
}
func countSections(path string) uint16 {
s := bytesconv.StringToBytes(path)
return uint16(bytes.Count(s, strSlash))
}
type nodeType uint8
const (
static nodeType = iota
root
param
catchAll
)
type node struct {
path string
indices string
wildChild bool
nType nodeType
priority uint32
children []*node // child nodes, at most 1 :param style node at the end of the array
handlers []HandlerFunc
fullPath string
}
// Increments priority of the given child and reorders if necessary
func (n *node) incrementChildPrio(pos int) int {
cs := n.children
cs[pos].priority++
prio := cs[pos].priority
// Adjust position (move to front)
newPos := pos
for ; newPos > 0 && cs[newPos-1].priority < prio; newPos-- {
// Swap node positions
cs[newPos-1], cs[newPos] = cs[newPos], cs[newPos-1]
}
// Build new index char string
if newPos != pos {
n.indices = n.indices[:newPos] + // Unchanged prefix, might be empty
n.indices[pos:pos+1] + // The index char we move
n.indices[newPos:pos] + n.indices[pos+1:] // Rest without char at 'pos'
}
return newPos
}
// addRoute adds a node with the given handle to the path.
// Not concurrency-safe!
func (n *node) addRoute(path string, handlers ...HandlerFunc) {
fullPath := path
n.priority++
// Empty tree
if len(n.path) == 0 && len(n.children) == 0 {
n.insertChild(path, fullPath, handlers...)
n.nType = root
return
}
parentFullPathIndex := 0
walk:
for {
// Find the longest common prefix.
// This also implies that the common prefix contains no ':' or '*'
// since the existing key can't contain those chars.
i := longestCommonPrefix(path, n.path)
// Split edge
if i < len(n.path) {
child := node{
path: n.path[i:],
wildChild: n.wildChild,
nType: static,
indices: n.indices,
children: n.children,
handlers: n.handlers,
priority: n.priority - 1,
fullPath: n.fullPath,
}
n.children = []*node{&child}
// []byte for proper unicode char conversion, see #65
n.indices = bytesconv.BytesToString([]byte{n.path[i]})
n.path = path[:i]
n.handlers = nil
n.wildChild = false
n.fullPath = fullPath[:parentFullPathIndex+i]
}
// Make new node a child of this node
if i < len(path) {
path = path[i:]
c := path[0]
// '/' after param
if n.nType == param && c == '/' && len(n.children) == 1 {
parentFullPathIndex += len(n.path)
n = n.children[0]
n.priority++
continue walk
}
// Check if a child with the next path byte exists
for i, max := 0, len(n.indices); i < max; i++ {
if c == n.indices[i] {
parentFullPathIndex += len(n.path)
i = n.incrementChildPrio(i)
n = n.children[i]
continue walk
}
}
// Otherwise insert it
if c != ':' && c != '*' && n.nType != catchAll {
// []byte for proper unicode char conversion, see #65
n.indices += bytesconv.BytesToString([]byte{c})
child := &node{
fullPath: fullPath,
}
n.addChild(child)
n.incrementChildPrio(len(n.indices) - 1)
n = child
} else if n.wildChild {
// inserting a wildcard node, need to check if it conflicts with the existing wildcard
n = n.children[len(n.children)-1]
n.priority++
// Check if the wildcard matches
if len(path) >= len(n.path) && n.path == path[:len(n.path)] &&
// Adding a child to a catchAll is not possible
n.nType != catchAll &&
// Check for longer wildcard, e.g. :name and :names
(len(n.path) >= len(path) || path[len(n.path)] == '/') {
continue walk
}
// Wildcard conflict
pathSeg := path
if n.nType != catchAll {
pathSeg = strings.SplitN(pathSeg, "/", 2)[0]
}
prefix := fullPath[:strings.Index(fullPath, pathSeg)] + n.path
panic("'" + pathSeg +
"' in new path '" + fullPath +
"' conflicts with existing wildcard '" + n.path +
"' in existing prefix '" + prefix +
"'")
}
n.insertChild(path, fullPath, handlers...)
return
}
// Otherwise add handle to current node
if n.handlers != nil {
panic("handlers are already registered for path '" + fullPath + "'")
}
n.handlers = handlers
n.fullPath = fullPath
return
}
}
// Search for a wildcard segment and check the name for invalid characters.
// Returns -1 as index, if no wildcard was found.
func findWildcard(path string) (wildcard string, i int, valid bool) {
// Find start
for start, c := range []byte(path) {
// A wildcard starts with ':' (param) or '*' (catch-all)
if c != ':' && c != '*' {
continue
}
// Find end and check for invalid characters
valid = true
for end, c := range []byte(path[start+1:]) {
switch c {
case '/':
return path[start : start+1+end], start, valid
case ':', '*':
valid = false
}
}
return path[start:], start, valid
}
return "", -1, false
}
func (n *node) insertChild(path string, fullPath string, handlers ...HandlerFunc) {
for {
// Find prefix until first wildcard
wildcard, i, valid := findWildcard(path)
if i < 0 { // No wildcard found
break
}
// The wildcard name must only contain one ':' or '*' character
if !valid {
panic("only one wildcard per path segment is allowed, has: '" +
wildcard + "' in path '" + fullPath + "'")
}
// check if the wildcard has a name
if len(wildcard) < 2 {
panic("wildcards must be named with a non-empty name in path '" + fullPath + "'")
}
if wildcard[0] == ':' { // param
if i > 0 {
// Insert prefix before the current wildcard
n.path = path[:i]
path = path[i:]
}
child := &node{
nType: param,
path: wildcard,
fullPath: fullPath,
}
n.addChild(child)
n.wildChild = true
n = child
n.priority++
// if the path doesn't end with the wildcard, then there
// will be another subpath starting with '/'
if len(wildcard) < len(path) {
path = path[len(wildcard):]
child := &node{
priority: 1,
fullPath: fullPath,
}
n.addChild(child)
n = child
continue
}
// Otherwise we're done. Insert the handle in the new leaf
n.handlers = handlers
return
}
// catchAll
if i+len(wildcard) != len(path) {
panic("catch-all routes are only allowed at the end of the path in path '" + fullPath + "'")
}
if len(n.path) > 0 && n.path[len(n.path)-1] == '/' {
pathSeg := ""
if len(n.children) != 0 {
pathSeg = strings.SplitN(n.children[0].path, "/", 2)[0]
}
panic("catch-all wildcard '" + path +
"' in new path '" + fullPath +
"' conflicts with existing path segment '" + pathSeg +
"' in existing prefix '" + n.path + pathSeg +
"'")
}
// currently fixed width 1 for '/'
i--
if path[i] != '/' {
panic("no / before catch-all in path '" + fullPath + "'")
}
n.path = path[:i]
// First node: catchAll node with empty path
child := &node{
wildChild: true,
nType: catchAll,
fullPath: fullPath,
}
n.addChild(child)
n.indices = string('/')
n = child
n.priority++
// second node: node holding the variable
child = &node{
path: path[i:],
nType: catchAll,
handlers: handlers,
priority: 1,
fullPath: fullPath,
}
n.children = []*node{child}
return
}
// If no wildcard was found, simply insert the path and handle
n.path = path
n.handlers = handlers
n.fullPath = fullPath
}
// nodeValue holds return values of (*Node).getValue method
type nodeValue struct {
handlers []HandlerFunc
params *Params
tsr bool
fullPath string
}
type skippedNode struct {
path string
node *node
paramsCount int16
}
// Returns the handle registered with the given path (key). The values of
// wildcards are saved to a map.
// If no handle can be found, a TSR (trailing slash redirect) recommendation is
// made if a handle exists with an extra (without the) trailing slash for the
// given path.
func (n *node) getValue(path string, params *Params, skippedNodes *[]skippedNode, unescape bool) (value nodeValue) {
var globalParamsCount int16
walk: // Outer loop for walking the tree
for {
prefix := n.path
if len(path) > len(prefix) {
if path[:len(prefix)] == prefix {
path = path[len(prefix):]
// Try all the non-wildcard children first by matching the indices
idxc := path[0]
for i, c := range []byte(n.indices) {
if c == idxc {
// strings.HasPrefix(n.children[len(n.children)-1].path, ":") == n.wildChild
if n.wildChild {
index := len(*skippedNodes)
*skippedNodes = (*skippedNodes)[:index+1]
(*skippedNodes)[index] = skippedNode{
path: prefix + path,
node: &node{
path: n.path,
wildChild: n.wildChild,
nType: n.nType,
priority: n.priority,
children: n.children,
handlers: n.handlers,
fullPath: n.fullPath,
},
paramsCount: globalParamsCount,
}
}
n = n.children[i]
continue walk
}
}
if !n.wildChild {
// If the path at the end of the loop is not equal to '/' and the current node has no child nodes
// the current node needs to roll back to last valid skippedNode
if path != "/" {
for length := len(*skippedNodes); length > 0; length-- {
skippedNode := (*skippedNodes)[length-1]
*skippedNodes = (*skippedNodes)[:length-1]
if strings.HasSuffix(skippedNode.path, path) {
path = skippedNode.path
n = skippedNode.node
if value.params != nil {
*value.params = (*value.params)[:skippedNode.paramsCount]
}
globalParamsCount = skippedNode.paramsCount
continue walk
}
}
}
// Nothing found.
// We can recommend to redirect to the same URL without a
// trailing slash if a leaf exists for that path.
value.tsr = path == "/" && n.handlers != nil
return value
}
// Handle wildcard child, which is always at the end of the array
n = n.children[len(n.children)-1]
globalParamsCount++
switch n.nType {
case param:
// fix truncate the parameter
// tree_test.go line: 204
// Find param end (either '/' or path end)
end := 0
for end < len(path) && path[end] != '/' {
end++
}
// Save param value
if params != nil {
// Preallocate capacity if necessary
if cap(*params) < int(globalParamsCount) {
newParams := make(Params, len(*params), globalParamsCount)
copy(newParams, *params)
*params = newParams
}
if value.params == nil {
value.params = params
}
// Expand slice within preallocated capacity
i := len(*value.params)
*value.params = (*value.params)[:i+1]
val := path[:end]
if unescape {
if v, err := url.QueryUnescape(val); err == nil {
val = v
}
}
(*value.params)[i] = Param{
Key: n.path[1:],
Value: val,
}
}
// we need to go deeper!
if end < len(path) {
if len(n.children) > 0 {
path = path[end:]
n = n.children[0]
continue walk
}
// ... but we can't
value.tsr = len(path) == end+1
return value
}
if value.handlers = n.handlers; value.handlers != nil {
value.fullPath = n.fullPath
return value
}
if len(n.children) == 1 {
// No handle found. Check if a handle for this path + a
// trailing slash exists for TSR recommendation
n = n.children[0]
value.tsr = (n.path == "/" && n.handlers != nil) || (n.path == "" && n.indices == "/")
}
return value
case catchAll:
// Save param value
if params != nil {
// Preallocate capacity if necessary
if cap(*params) < int(globalParamsCount) {
newParams := make(Params, len(*params), globalParamsCount)
copy(newParams, *params)
*params = newParams
}
if value.params == nil {
value.params = params
}
// Expand slice within preallocated capacity
i := len(*value.params)
*value.params = (*value.params)[:i+1]
val := path
if unescape {
if v, err := url.QueryUnescape(path); err == nil {
val = v
}
}
(*value.params)[i] = Param{
Key: n.path[2:],
Value: val,
}
}
value.handlers = n.handlers
value.fullPath = n.fullPath
return value
default:
panic("invalid node type")
}
}
}
if path == prefix {
// If the current path does not equal '/' and the node does not have a registered handle and the most recently matched node has a child node
// the current node needs to roll back to last valid skippedNode
if n.handlers == nil && path != "/" {
for length := len(*skippedNodes); length > 0; length-- {
skippedNode := (*skippedNodes)[length-1]
*skippedNodes = (*skippedNodes)[:length-1]
if strings.HasSuffix(skippedNode.path, path) {
path = skippedNode.path
n = skippedNode.node
if value.params != nil {
*value.params = (*value.params)[:skippedNode.paramsCount]
}
globalParamsCount = skippedNode.paramsCount
continue walk
}
}
// n = latestNode.children[len(latestNode.children)-1]
}
// We should have reached the node containing the handle.
// Check if this node has a handle registered.
if value.handlers = n.handlers; value.handlers != nil {
value.fullPath = n.fullPath
return value
}
// If there is no handle for this route, but this route has a
// wildcard child, there must be a handle for this path with an
// additional trailing slash
if path == "/" && n.wildChild && n.nType != root {
value.tsr = true
return value
}
if path == "/" && n.nType == static {
value.tsr = true
return value
}
// No handle found. Check if a handle for this path + a
// trailing slash exists for trailing slash recommendation
for i, c := range []byte(n.indices) {
if c == '/' {
n = n.children[i]
value.tsr = (len(n.path) == 1 && n.handlers != nil) ||
(n.nType == catchAll && n.children[0].handlers != nil)
return value
}
}
return value
}
// Nothing found. We can recommend to redirect to the same URL with an
// extra trailing slash if a leaf exists for that path
value.tsr = path == "/" ||
(len(prefix) == len(path)+1 && prefix[len(path)] == '/' &&
path == prefix[:len(prefix)-1] && n.handlers != nil)
// roll back to last valid skippedNode
if !value.tsr && path != "/" {
for length := len(*skippedNodes); length > 0; length-- {
skippedNode := (*skippedNodes)[length-1]
*skippedNodes = (*skippedNodes)[:length-1]
if strings.HasSuffix(skippedNode.path, path) {
path = skippedNode.path
n = skippedNode.node
if value.params != nil {
*value.params = (*value.params)[:skippedNode.paramsCount]
}
globalParamsCount = skippedNode.paramsCount
continue walk
}
}
}
return value
}
}
// Makes a case-insensitive lookup of the given path and tries to find a handler.
// It can optionally also fix trailing slashes.
// It returns the case-corrected path and a bool indicating whether the lookup
// was successful.
func (n *node) findCaseInsensitivePath(path string, fixTrailingSlash bool) ([]byte, bool) {
const stackBufSize = 128
// Use a static sized buffer on the stack in the common case.
// If the path is too long, allocate a buffer on the heap instead.
buf := make([]byte, 0, stackBufSize)
if length := len(path) + 1; length > stackBufSize {
buf = make([]byte, 0, length)
}
ciPath := n.findCaseInsensitivePathRec(
path,
buf, // Preallocate enough memory for new path
[4]byte{}, // Empty rune buffer
fixTrailingSlash,
)
return ciPath, ciPath != nil
}
// Shift bytes in array by n bytes left
func shiftNRuneBytes(rb [4]byte, n int) [4]byte {
switch n {
case 0:
return rb
case 1:
return [4]byte{rb[1], rb[2], rb[3], 0}
case 2:
return [4]byte{rb[2], rb[3]}
case 3:
return [4]byte{rb[3]}
default:
return [4]byte{}
}
}
// Recursive case-insensitive lookup function used by n.findCaseInsensitivePath
func (n *node) findCaseInsensitivePathRec(path string, ciPath []byte, rb [4]byte, fixTrailingSlash bool) []byte {
npLen := len(n.path)
walk: // Outer loop for walking the tree
for len(path) >= npLen && (npLen == 0 || strings.EqualFold(path[1:npLen], n.path[1:])) {
// Add common prefix to result
oldPath := path
path = path[npLen:]
ciPath = append(ciPath, n.path...)
if len(path) == 0 {
// We should have reached the node containing the handle.
// Check if this node has a handle registered.
if n.handlers != nil {
return ciPath
}
// No handle found.
// Try to fix the path by adding a trailing slash
if fixTrailingSlash {
for i, c := range []byte(n.indices) {
if c == '/' {
n = n.children[i]
if (len(n.path) == 1 && n.handlers != nil) ||
(n.nType == catchAll && n.children[0].handlers != nil) {
return append(ciPath, '/')
}
return nil
}
}
}
return nil
}
// If this node does not have a wildcard (param or catchAll) child,
// we can just look up the next child node and continue to walk down
// the tree
if !n.wildChild {
// Skip rune bytes already processed
rb = shiftNRuneBytes(rb, npLen)
if rb[0] != 0 {
// Old rune not finished
idxc := rb[0]
for i, c := range []byte(n.indices) {
if c == idxc {
// continue with child node
n = n.children[i]
npLen = len(n.path)
continue walk
}
}
} else {
// Process a new rune
var rv rune
// Find rune start.
// Runes are up to 4 byte long,
// -4 would definitely be another rune.
var off int
for max := min(npLen, 3); off < max; off++ {
if i := npLen - off; utf8.RuneStart(oldPath[i]) {
// read rune from cached path
rv, _ = utf8.DecodeRuneInString(oldPath[i:])
break
}
}
// Calculate lowercase bytes of current rune
lo := unicode.ToLower(rv)
utf8.EncodeRune(rb[:], lo)
// Skip already processed bytes
rb = shiftNRuneBytes(rb, off)
idxc := rb[0]
for i, c := range []byte(n.indices) {
// Lowercase matches
if c == idxc {
// must use a recursive approach since both the
// uppercase byte and the lowercase byte might exist
// as an index
if out := n.children[i].findCaseInsensitivePathRec(
path, ciPath, rb, fixTrailingSlash,
); out != nil {
return out
}
break
}
}
// If we found no match, the same for the uppercase rune,
// if it differs
if up := unicode.ToUpper(rv); up != lo {
utf8.EncodeRune(rb[:], up)
rb = shiftNRuneBytes(rb, off)
idxc := rb[0]
for i, c := range []byte(n.indices) {
// Uppercase matches
if c == idxc {
// Continue with child node
n = n.children[i]
npLen = len(n.path)
continue walk
}
}
}
}
// Nothing found. We can recommend to redirect to the same URL
// without a trailing slash if a leaf exists for that path
if fixTrailingSlash && path == "/" && n.handlers != nil {
return ciPath
}
return nil
}
n = n.children[0]
switch n.nType {
case param:
// Find param end (either '/' or path end)
end := 0
for end < len(path) && path[end] != '/' {
end++
}
// Add param value to case insensitive path
ciPath = append(ciPath, path[:end]...)
// We need to go deeper!
if end < len(path) {
if len(n.children) > 0 {
// Continue with child node
n = n.children[0]
npLen = len(n.path)
path = path[end:]
continue
}
// ... but we can't
if fixTrailingSlash && len(path) == end+1 {
return ciPath
}
return nil
}
if n.handlers != nil {
return ciPath
}
if fixTrailingSlash && len(n.children) == 1 {
// No handle found. Check if a handle for this path + a
// trailing slash exists
n = n.children[0]
if n.path == "/" && n.handlers != nil {
return append(ciPath, '/')
}
}
return nil
case catchAll:
return append(ciPath, path...)
default:
panic("invalid node type")
}
}
// Nothing found.
// Try to fix the path by adding / removing a trailing slash
if fixTrailingSlash {
if path == "/" {
return ciPath
}
if len(path)+1 == npLen && n.path[len(path)] == '/' &&
strings.EqualFold(path[1:], n.path[1:len(path)]) && n.handlers != nil {
return append(ciPath, n.path...)
}
}
return nil
}
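
Usage note: a sketch of the two wildcard patterns this tree supports, ':param' for a single path segment and a trailing '*catchall' for the rest of the path; captured values are read back with Ctx.Param. The paths are illustrative.

package main

import "github.com/loveuer/upp/pkg/api"

func main() {
    app := api.New()

    // ':id' and ':post' each match exactly one segment.
    app.GET("/users/:id/posts/:post", func(c *api.Ctx) error {
        return c.JSON(api.Map{"user": c.Param("id"), "post": c.Param("post")})
    })

    // '*filepath' must be the last element and captures the remainder of
    // the path, including its leading '/'.
    app.GET("/static/*filepath", func(c *api.Ctx) error {
        return c.SendString(c.Param("filepath"))
    })

    _ = app.Run(":8080")
}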

pkg/api/util.go

@@ -0,0 +1,226 @@
package api
import (
"fmt"
"strings"
"github.com/loveuer/upp/internal/schema"
)
const (
MIMETextXML = "text/xml"
MIMETextHTML = "text/html"
MIMETextPlain = "text/plain"
MIMETextJavaScript = "text/javascript"
MIMEApplicationXML = "application/xml"
MIMEApplicationJSON = "application/json"
MIMEApplicationForm = "application/x-www-form-urlencoded"
MIMEOctetStream = "application/octet-stream"
MIMEMultipartForm = "multipart/form-data"
MIMETextXMLCharsetUTF8 = "text/xml; charset=utf-8"
MIMETextHTMLCharsetUTF8 = "text/html; charset=utf-8"
MIMETextPlainCharsetUTF8 = "text/plain; charset=utf-8"
MIMETextJavaScriptCharsetUTF8 = "text/javascript; charset=utf-8"
MIMEApplicationXMLCharsetUTF8 = "application/xml; charset=utf-8"
MIMEApplicationJSONCharsetUTF8 = "application/json; charset=utf-8"
// Deprecated: use MIMETextJavaScriptCharsetUTF8 instead
MIMEApplicationJavaScriptCharsetUTF8 = "application/javascript; charset=utf-8"
)
// parseVendorSpecificContentType check if content type is vendor specific and
// if it is parsable to any known types. If it's not vendor specific then returns
// the original content type.
func parseVendorSpecificContentType(cType string) string {
plusIndex := strings.Index(cType, "+")
if plusIndex == -1 {
return cType
}
var parsableType string
if semiColonIndex := strings.Index(cType, ";"); semiColonIndex == -1 {
parsableType = cType[plusIndex+1:]
} else if plusIndex < semiColonIndex {
parsableType = cType[plusIndex+1 : semiColonIndex]
} else {
return cType[:semiColonIndex]
}
slashIndex := strings.Index(cType, "/")
if slashIndex == -1 {
return cType
}
return cType[0:slashIndex+1] + parsableType
}
func parseToStruct(aliasTag string, out interface{}, data map[string][]string) error {
schemaDecoder := schema.NewDecoder()
schemaDecoder.SetAliasTag(aliasTag)
if err := schemaDecoder.Decode(out, data); err != nil {
return fmt.Errorf("failed to decode: %w", err)
}
return nil
}
func elsePanic(guard bool, text string) {
if !guard {
panic(text)
}
}
func cleanPath(p string) string {
const stackBufSize = 128
// Turn empty string into "/"
if p == "" {
return "/"
}
// Reasonably sized buffer on stack to avoid allocations in the common case.
// If a larger buffer is required, it gets allocated dynamically.
buf := make([]byte, 0, stackBufSize)
n := len(p)
// Invariants:
// reading from path; r is index of next byte to process.
// writing to buf; w is index of next byte to write.
// path must start with '/'
r := 1
w := 1
if p[0] != '/' {
r = 0
if n+1 > stackBufSize {
buf = make([]byte, n+1)
} else {
buf = buf[:n+1]
}
buf[0] = '/'
}
trailing := n > 1 && p[n-1] == '/'
// A bit more clunky without a 'lazybuf' like the path package, but the loop
// gets completely inlined (bufApp calls).
// So in contrast to the path package this loop has no expensive function
// calls (except make, if needed).
for r < n {
switch {
case p[r] == '/':
// empty path element, trailing slash is added after the end
r++
case p[r] == '.' && r+1 == n:
trailing = true
r++
case p[r] == '.' && p[r+1] == '/':
// . element
r += 2
case p[r] == '.' && p[r+1] == '.' && (r+2 == n || p[r+2] == '/'):
// .. element: remove to last /
r += 3
if w > 1 {
// can backtrack
w--
if len(buf) == 0 {
for w > 1 && p[w] != '/' {
w--
}
} else {
for w > 1 && buf[w] != '/' {
w--
}
}
}
default:
// Real path element.
// Add slash if needed
if w > 1 {
bufApp(&buf, p, w, '/')
w++
}
// Copy element
for r < n && p[r] != '/' {
bufApp(&buf, p, w, p[r])
w++
r++
}
}
}
// Re-append trailing slash
if trailing && w > 1 {
bufApp(&buf, p, w, '/')
w++
}
// If the original string was not modified (or only shortened at the end),
// return the respective substring of the original string.
// Otherwise return a new string from the buffer.
if len(buf) == 0 {
return p[:w]
}
return string(buf[:w])
}
// Internal helper to lazily create a buffer if necessary.
// Calls to this function get inlined.
func bufApp(buf *[]byte, s string, w int, c byte) {
b := *buf
if len(b) == 0 {
// No modification of the original string so far.
// If the next character is the same as in the original string, we do
// not yet have to allocate a buffer.
if s[w] == c {
return
}
// Otherwise use either the stack buffer, if it is large enough, or
// allocate a new buffer on the heap, and copy all previous characters.
length := len(s)
if length > cap(b) {
*buf = make([]byte, length)
} else {
*buf = (*buf)[:length]
}
b = *buf
copy(b, s[:w])
}
b[w] = c
}
func HumanDuration(nano int64) string {
duration := float64(nano)
unit := "ns"
if duration >= 1000 {
duration /= 1000
unit = "us"
}
if duration >= 1000 {
duration /= 1000
unit = "ms"
}
if duration >= 1000 {
duration /= 1000
unit = " s"
}
return fmt.Sprintf("%6.2f%s", duration, unit)
}
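
Usage note: a quick sketch of the exported HumanDuration helper above (the other helpers in this file are unexported and only reachable inside the package); the sample values are illustrative.

package main

import (
    "fmt"
    "time"

    "github.com/loveuer/upp/pkg/api"
)

func main() {
    // Scales a raw nanosecond count into ns/us/ms/s at a fixed width,
    // which keeps log columns aligned.
    fmt.Println(api.HumanDuration(350))                             // 350.00ns
    fmt.Println(api.HumanDuration(1_500_000))                       //   1.50ms
    fmt.Println(api.HumanDuration((2 * time.Second).Nanoseconds())) //   2.00 s
}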

pkg/cache/cache.go

@@ -0,0 +1,60 @@
package cache
import (
"context"
"encoding/json"
"errors"
"time"
)
type Cache interface {
Get(ctx context.Context, key string) ([]byte, error)
Gets(ctx context.Context, keys ...string) ([][]byte, error)
GetScan(ctx context.Context, key string) Scanner
GetEx(ctx context.Context, key string, duration time.Duration) ([]byte, error)
GetExScan(ctx context.Context, key string, duration time.Duration) Scanner
// Set serializes value before storing: MarshalBinary is used when implemented, otherwise json.Marshal
Set(ctx context.Context, key string, value any) error
Sets(ctx context.Context, vm map[string]any) error
// SetEx serializes value the same way as Set: MarshalBinary when implemented, otherwise json.Marshal
SetEx(ctx context.Context, key string, value any, duration time.Duration) error
Del(ctx context.Context, keys ...string) error
}
type Scanner interface {
Scan(model any) error
}
type encoded_value interface {
MarshalBinary() ([]byte, error)
}
type decoded_value interface {
UnmarshalBinary(bs []byte) error
}
const (
Prefix = "upp:"
)
var ErrorKeyNotFound = errors.New("key not found")
func handleValue(value any) ([]byte, error) {
var (
bs []byte
err error
)
switch value.(type) {
case []byte:
return value.([]byte), nil
}
if imp, ok := value.(encoded_value); ok {
bs, err = imp.MarshalBinary()
} else {
bs, err = json.Marshal(value)
}
return bs, err
}
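
Usage note: a sketch of a value going through the Cache interface above; the session type is hypothetical and, lacking MarshalBinary, is stored via json.Marshal and read back through Scanner.Scan. The concrete backend comes from New in pkg/cache/new.go.

package main

import (
    "context"
    "fmt"
    "time"

    "github.com/loveuer/upp/pkg/cache"
)

// session has no MarshalBinary, so handleValue falls back to json.Marshal.
type session struct {
    UserID int       `json:"user_id"`
    Expire time.Time `json:"expire"`
}

func main() {
    c, err := cache.New("lru")
    if err != nil {
        panic(err)
    }

    ctx := context.Background()
    key := cache.Prefix + "session:42"

    if err = c.SetEx(ctx, key, session{UserID: 42, Expire: time.Now().Add(time.Hour)}, 10*time.Minute); err != nil {
        panic(err)
    }

    var got session
    if err = c.GetScan(ctx, key).Scan(&got); err != nil {
        panic(err)
    }
    fmt.Println(got.UserID) // 42
}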

pkg/cache/cache.lru.go

@@ -0,0 +1,141 @@
package cache
import (
"context"
"time"
"github.com/hashicorp/golang-lru/v2/expirable"
)
var _ Cache = (*_lru)(nil)
type _lru struct {
client *expirable.LRU[string, *_lru_value]
}
type _lru_value struct {
duration time.Duration
last time.Time
bs []byte
}
func (l *_lru) Get(ctx context.Context, key string) ([]byte, error) {
v, ok := l.client.Get(key)
if !ok {
return nil, ErrorKeyNotFound
}
if v.duration == 0 {
return v.bs, nil
}
if time.Now().Sub(v.last) > v.duration {
l.client.Remove(key)
return nil, ErrorKeyNotFound
}
return v.bs, nil
}
func (l *_lru) Gets(ctx context.Context, keys ...string) ([][]byte, error) {
bss := make([][]byte, 0, len(keys))
for _, key := range keys {
bs, err := l.Get(ctx, key)
if err != nil {
return nil, err
}
bss = append(bss, bs)
}
return bss, nil
}
func (l *_lru) GetScan(ctx context.Context, key string) Scanner {
return newScanner(l.Get(ctx, key))
}
func (l *_lru) GetEx(ctx context.Context, key string, duration time.Duration) ([]byte, error) {
v, ok := l.client.Get(key)
if !ok {
return nil, ErrorKeyNotFound
}
if v.duration == 0 {
return v.bs, nil
}
now := time.Now()
if now.Sub(v.last) > v.duration {
l.client.Remove(key)
return nil, ErrorKeyNotFound
}
l.client.Add(key, &_lru_value{
duration: duration,
last: now,
bs: v.bs,
})
return v.bs, nil
}
func (l *_lru) GetExScan(ctx context.Context, key string, duration time.Duration) Scanner {
return newScanner(l.GetEx(ctx, key, duration))
}
func (l *_lru) Set(ctx context.Context, key string, value any) error {
bs, err := handleValue(value)
if err != nil {
return err
}
l.client.Add(key, &_lru_value{
duration: 0,
last: time.Now(),
bs: bs,
})
return nil
}
func (l *_lru) SetEx(ctx context.Context, key string, value any, duration time.Duration) error {
bs, err := handleValue(value)
if err != nil {
return err
}
l.client.Add(key, &_lru_value{
duration: duration,
last: time.Now(),
bs: bs,
})
return nil
}
func (l *_lru) Sets(ctx context.Context, m map[string]any) error {
for k, v := range m {
if err := l.Set(ctx, k, v); err != nil {
return err
}
}
return nil
}
func (l *_lru) Del(ctx context.Context, keys ...string) error {
for _, key := range keys {
l.client.Remove(key)
}
return nil
}
func newLRUCache() (Cache, error) {
client := expirable.NewLRU[string, *_lru_value](1024*1024, nil, 0)
return &_lru{client: client}, nil
}

pkg/cache/cache.memory.go

@@ -0,0 +1,105 @@
package cache
import (
"context"
"errors"
"fmt"
"time"
"gitea.com/loveuer/gredis"
)
var _ Cache = (*_mem)(nil)
type _mem struct {
client *gredis.Gredis
}
func (m *_mem) GetScan(ctx context.Context, key string) Scanner {
return newScanner(m.Get(ctx, key))
}
func (m *_mem) GetExScan(ctx context.Context, key string, duration time.Duration) Scanner {
return newScanner(m.GetEx(ctx, key, duration))
}
func (m *_mem) Get(ctx context.Context, key string) ([]byte, error) {
v, err := m.client.Get(key)
if err != nil {
if errors.Is(err, gredis.ErrKeyNotFound) {
return nil, ErrorKeyNotFound
}
return nil, err
}
bs, ok := v.([]byte)
if !ok {
return nil, fmt.Errorf("invalid value type=%T", v)
}
return bs, nil
}
func (m *_mem) Gets(ctx context.Context, keys ...string) ([][]byte, error) {
bss := make([][]byte, 0, len(keys))
for _, key := range keys {
bs, err := m.Get(ctx, key)
if err != nil {
return nil, err
}
bss = append(bss, bs)
}
return bss, nil
}
func (m *_mem) GetEx(ctx context.Context, key string, duration time.Duration) ([]byte, error) {
v, err := m.client.GetEx(key, duration)
if err != nil {
if errors.Is(err, gredis.ErrKeyNotFound) {
return nil, ErrorKeyNotFound
}
return nil, err
}
bs, ok := v.([]byte)
if !ok {
return nil, fmt.Errorf("invalid value type=%T", v)
}
return bs, nil
}
func (m *_mem) Set(ctx context.Context, key string, value any) error {
bs, err := handleValue(value)
if err != nil {
return err
}
return m.client.Set(key, bs)
}
func (m *_mem) Sets(ctx context.Context, vm map[string]any) error {
for k, v := range vm {
if err := m.Set(ctx, k, v); err != nil {
return err
}
}
return nil
}
func (m *_mem) SetEx(ctx context.Context, key string, value any, duration time.Duration) error {
bs, err := handleValue(value)
if err != nil {
return err
}
return m.client.SetEx(key, bs, duration)
}
func (m *_mem) Del(ctx context.Context, keys ...string) error {
m.client.Delete(keys...)
return nil
}

pkg/cache/cache.redis.go

@@ -0,0 +1,106 @@
package cache
import (
"context"
"errors"
"time"
"github.com/go-redis/redis/v8"
"github.com/samber/lo"
"github.com/spf13/cast"
)
type _redis struct {
client *redis.Client
}
func (r *_redis) Get(ctx context.Context, key string) ([]byte, error) {
result, err := r.client.Get(ctx, key).Result()
if err != nil {
if errors.Is(err, redis.Nil) {
return nil, ErrorKeyNotFound
}
return nil, err
}
return []byte(result), nil
}
func (r *_redis) Gets(ctx context.Context, keys ...string) ([][]byte, error) {
result, err := r.client.MGet(ctx, keys...).Result()
if err != nil {
if errors.Is(err, redis.Nil) {
return nil, ErrorKeyNotFound
}
return nil, err
}
return lo.Map(
result,
func(item any, index int) []byte {
return []byte(cast.ToString(item))
},
), nil
}
func (r *_redis) GetScan(ctx context.Context, key string) Scanner {
return newScanner(r.Get(ctx, key))
}
func (r *_redis) GetEx(ctx context.Context, key string, duration time.Duration) ([]byte, error) {
result, err := r.client.GetEx(ctx, key, duration).Result()
if err != nil {
if errors.Is(err, redis.Nil) {
return nil, ErrorKeyNotFound
}
return nil, err
}
return []byte(result), nil
}
func (r *_redis) GetExScan(ctx context.Context, key string, duration time.Duration) Scanner {
return newScanner(r.GetEx(ctx, key, duration))
}
func (r *_redis) Set(ctx context.Context, key string, value any) error {
bs, err := handleValue(value)
if err != nil {
return err
}
_, err = r.client.Set(ctx, key, bs, redis.KeepTTL).Result()
return err
}
func (r *_redis) Sets(ctx context.Context, values map[string]any) error {
vm := make(map[string]any)
for k, v := range values {
bs, err := handleValue(v)
if err != nil {
return err
}
vm[k] = bs
}
return r.client.MSet(ctx, vm).Err()
}
func (r *_redis) SetEx(ctx context.Context, key string, value any, duration time.Duration) error {
bs, err := handleValue(value)
if err != nil {
return err
}
_, err = r.client.SetEX(ctx, key, bs, duration).Result()
return err
}
func (r *_redis) Del(ctx context.Context, keys ...string) error {
return r.client.Del(ctx, keys...).Err()
}

70
pkg/cache/new.go vendored Normal file
View File

@ -0,0 +1,70 @@
package cache
import (
"fmt"
"net/url"
"strings"
"gitea.com/loveuer/gredis"
"github.com/go-redis/redis/v8"
"github.com/loveuer/upp/pkg/tool"
)
func New(uri string) (Cache, error) {
var (
client Cache
err error
)
strs := strings.Split(uri, "::")
switch strs[0] {
case "memory":
gc := gredis.NewGredis(1024 * 1024)
client = &_mem{client: gc}
case "lru":
if client, err = newLRUCache(); err != nil {
return nil, err
}
case "redis":
var (
ins *url.URL
err error
)
if len(strs) != 2 {
return nil, fmt.Errorf("cache.Init: invalid cache uri: %s", uri)
}
uri := strs[1]
if !strings.Contains(uri, "://") {
uri = fmt.Sprintf("redis://%s", uri)
}
if ins, err = url.Parse(uri); err != nil {
return nil, fmt.Errorf("cache.Init: url parse cache uri: %s, err: %s", uri, err.Error())
}
addr := ins.Host
username := ins.User.Username()
password, _ := ins.User.Password()
rc := redis.NewClient(&redis.Options{
Addr:     addr,
Username: username,
Password: password,
})
if err = rc.Ping(tool.Timeout(5)).Err(); err != nil {
return nil, fmt.Errorf("cache.Init: redis ping err: %s", err.Error())
}
client = &_redis{client: rc}
default:
return nil, fmt.Errorf("cache type %s not support", strs[0])
}
return client, nil
}
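A minimal usage sketch, assuming the Cache interface exposes the Set/GetScan methods implemented above and that handleValue JSON-encodes values (matching the json-based Scanner); the key and struct are placeholders:

package main

import (
	"context"

	"github.com/loveuer/upp/pkg/cache"
	"github.com/loveuer/upp/pkg/log"
)

type user struct {
	Name string `json:"name"`
}

func main() {
	// "memory" and "lru" need no address; redis takes the form "redis::<host>:<port>"
	c, err := cache.New("memory")
	if err != nil {
		log.Fatal("cache.New: %s", err.Error())
	}

	ctx := context.Background()
	if err = c.Set(ctx, "user:1", user{Name: "alice"}); err != nil {
		log.Fatal("set: %s", err.Error())
	}

	var u user
	if err = c.GetScan(ctx, "user:1").Scan(&u); err != nil {
		log.Fatal("get: %s", err.Error())
	}
	log.Info("user name = %s", u.Name)
}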

20
pkg/cache/scan.go vendored Normal file
View File

@ -0,0 +1,20 @@
package cache
import "encoding/json"
type scanner struct {
err error
bs []byte
}
func (s *scanner) Scan(model any) error {
if s.err != nil {
return s.err
}
return json.Unmarshal(s.bs, model)
}
func newScanner(bs []byte, err error) *scanner {
return &scanner{bs: bs, err: err}
}

58
pkg/db/new.go Normal file
View File

@ -0,0 +1,58 @@
package db
import (
"fmt"
"net/url"
"strings"
"github.com/glebarez/sqlite"
"github.com/loveuer/upp/pkg/log"
"gorm.io/driver/mysql"
"gorm.io/driver/postgres"
"gorm.io/gorm"
)
func New(uri string) (*gorm.DB, error) {
ins, err := url.Parse(uri)
if err != nil {
return nil, err
}
var (
username = ""
password = ""
tx *gorm.DB
)
if ins.User != nil {
username = ins.User.Username()
password, _ = ins.User.Password()
}
switch ins.Scheme {
case "sqlite":
path := strings.TrimPrefix(uri, ins.Scheme+"://")
log.Debug("db.New: type = %s, path = %s", ins.Scheme, path)
tx, err = gorm.Open(sqlite.Open(path))
case "mysql", "mariadb":
dsn := fmt.Sprintf("%s:%s@tcp(%s)/%s?%s", username, password, ins.Host, ins.Path, ins.RawQuery)
log.Debug("db.New: type = %s, dsn = %s", ins.Scheme, dsn)
tx, err = gorm.Open(mysql.Open(dsn))
case "pg", "postgres", "postgresql":
opts := make([]string, 0)
for key, val := range ins.Query() {
opts = append(opts, fmt.Sprintf("%s=%s", key, val))
}
dsn := fmt.Sprintf("host=%s user=%s password=%s dbname=%s port=%s %s", ins.Hostname(), username, password, ins.Path, ins.Port(), strings.Join(opts, " "))
log.Debug("db.New: type = %s, dsn = %s", ins.Scheme, dsn)
tx, err = gorm.Open(postgres.Open(dsn))
default:
return nil, fmt.Errorf("invalid database type(uri_scheme): %s", ins.Scheme)
}
if err != nil {
return nil, err
}
return tx, nil
}
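A minimal sketch of the three URI forms; the file name, credentials and model are placeholders:

package main

import (
	"github.com/loveuer/upp/pkg/db"
	"github.com/loveuer/upp/pkg/log"
)

type Book struct {
	ID    uint `gorm:"primaryKey"`
	Title string
}

func main() {
	// also accepted: "mysql://user:pass@127.0.0.1:3306/mydb?parseTime=true"
	//                "postgres://user:pass@127.0.0.1:5432/mydb?sslmode=disable"
	tx, err := db.New("sqlite://upp.db")
	if err != nil {
		log.Fatal("db.New: %s", err.Error())
	}
	if err = tx.AutoMigrate(&Book{}); err != nil {
		log.Fatal("migrate: %s", err.Error())
	}
	tx.Create(&Book{Title: "upp"})
}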

77
pkg/es/client.go Normal file
View File

@ -0,0 +1,77 @@
package es
import (
"context"
"crypto/tls"
"fmt"
"net"
"net/http"
"net/url"
"strings"
"time"
elastic "github.com/elastic/go-elasticsearch/v7"
"github.com/elastic/go-elasticsearch/v7/esapi"
"github.com/loveuer/upp/pkg/tool"
"github.com/samber/lo"
)
// New creates an elasticsearch (v7) client.
// example:
// - uri: http://127.0.0.1:9200
// - uri: https://<username>:<password>@node1:9200,node2:9200,node3:9200
func New(ctx context.Context, uri string) (*elastic.Client, error) {
var (
err error
username string
password string
client *elastic.Client
ins *url.URL
)
if ins, err = url.Parse(uri); err != nil {
return nil, err
}
endpoints := lo.Map(
strings.Split(ins.Host, ","),
func(item string, index int) string {
return fmt.Sprintf("%s://%s", ins.Scheme, item)
},
)
if ins.User != nil {
username = ins.User.Username()
password, _ = ins.User.Password()
}
if client, err = elastic.NewClient(
elastic.Config{
Addresses: endpoints,
Username: username,
Password: password,
CACert: nil,
RetryOnStatus: []int{429},
MaxRetries: 3,
RetryBackoff: nil,
Transport: &http.Transport{
TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
DialContext: (&net.Dialer{Timeout: 10 * time.Second}).DialContext,
},
},
); err != nil {
return nil, err
}
var res *esapi.Response
if res, err = client.Ping(client.Ping.WithContext(tool.TimeoutCtx(ctx, 5))); err != nil {
return nil, err
}
if res.StatusCode != 200 {
err = fmt.Errorf("ping client response: %s", res.String())
return nil, err
}
return client, nil
}
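A minimal sketch of connecting and issuing a request; the address and credentials are placeholders:

package main

import (
	"context"

	"github.com/loveuer/upp/pkg/es"
	"github.com/loveuer/upp/pkg/log"
)

func main() {
	client, err := es.New(context.Background(), "http://elastic:changeme@127.0.0.1:9200")
	if err != nil {
		log.Fatal("es.New: %s", err.Error())
	}

	res, err := client.Info()
	if err != nil {
		log.Fatal("es info: %s", err.Error())
	}
	defer res.Body.Close()
	log.Info("cluster info: %s", res.String())
}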

View File

@ -0,0 +1,64 @@
package interfaces
import (
"context"
"mime/multipart"
"net"
)
type ApiGroup interface {
Group(path string, handlers ...ApiHandler) ApiGroup
GET(path string, handlers ...ApiHandler)
POST(path string, handlers ...ApiHandler)
PUT(path string, handlers ...ApiHandler)
DELETE(path string, handlers ...ApiHandler)
HEAD(path string, handlers ...ApiHandler)
PATCH(path string, handlers ...ApiHandler)
OPTIONS(path string, handlers ...ApiHandler)
Handle(method, path string, handlers ...ApiHandler)
Use(handlers ...ApiHandler)
}
type ApiEngine interface {
ApiGroup
Run(address string) error
RunListener(ln net.Listener) error
}
type ApiContext interface {
App() Upp
// parse body, form, json
BodyParser(out any) error
Context() context.Context
Cookie(string) string
FormFile(string) (*multipart.FileHeader, error)
FormValue(string) string
GetHeader(string) string
SetHeader(string, string)
IP() string
Json(any) error
Locals(any, ...any) any
// get method or rewrite method
Method(string, ...string) string
MultipartForm() (*multipart.Form, error)
Param(string) string
// get path or rewrite path
Path(string, ...string) string
Next() error
Query(string) string
QueryParse(any) error
Redirect(string, ...int) error
// render html response
Render(name, layout string, data any) error
// set response status
Status(int)
SendStatus(int)
Write([]byte) (int, error)
}
type ApiHandler func(c ApiContext) error
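
Any function with this signature is a handler; a minimal sketch (the route path and payload are made up for illustration, and the ApiEngine is assumed to be obtained elsewhere):

package routes

import "github.com/loveuer/upp/pkg/interfaces"

// ping responds with a small JSON payload.
func ping(c interfaces.ApiContext) error {
	return c.Json(map[string]any{"ok": true, "ip": c.IP()})
}

// register wires the handler onto any ApiEngine implementation.
func register(api interfaces.ApiEngine) {
	v1 := api.Group("/api/v1")
	v1.GET("/ping", ping)
}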

10
pkg/interfaces/logger.go Normal file
View File

@ -0,0 +1,10 @@
package interfaces
type Logger interface {
Debug(string, ...any)
Info(string, ...any)
Warn(string, ...any)
Error(string, ...any)
Panic(string, ...any)
Fatal(string, ...any)
}

17
pkg/interfaces/upp.go Normal file
View File

@ -0,0 +1,17 @@
package interfaces
import (
"context"
"github.com/elastic/go-elasticsearch/v7"
"github.com/loveuer/upp/pkg/cache"
"gorm.io/gorm"
)
type Upp interface {
UseCtx() context.Context
UseDB(ctx ...context.Context) *gorm.DB
UseCache() cache.Cache
UseES() *elasticsearch.Client
UseLogger(ctxs ...context.Context) Logger
}

67
pkg/log/default.go Normal file
View File

@ -0,0 +1,67 @@
package log
import (
"fmt"
"os"
"sync"
)
var (
nilLogger = func(prefix, timestamp, msg string, data ...any) {}
normalLogger = func(prefix, timestamp, msg string, data ...any) {
fmt.Printf(prefix+"| "+timestamp+" | "+msg+"\n", data...)
}
panicLogger = func(prefix, timestamp, msg string, data ...any) {
panic(fmt.Sprintf(prefix+"| "+timestamp+" | "+msg+"\n", data...))
}
fatalLogger = func(prefix, timestamp, msg string, data ...any) {
fmt.Printf(prefix+"| "+timestamp+" | "+msg+"\n", data...)
os.Exit(1)
}
DefaultLogger = &logger{
Mutex: sync.Mutex{},
timeFormat: "2006-01-02T15:04:05",
writer: os.Stdout,
level: LogLevelInfo,
debug: nilLogger,
info: normalLogger,
warn: normalLogger,
error: normalLogger,
panic: panicLogger,
fatal: fatalLogger,
}
)
func SetTimeFormat(format string) {
DefaultLogger.SetTimeFormat(format)
}
func SetLogLevel(level LogLevel) {
DefaultLogger.SetLogLevel(level)
}
func Debug(msg string, data ...any) {
DefaultLogger.Debug(msg, data...)
}
func Info(msg string, data ...any) {
DefaultLogger.Info(msg, data...)
}
func Warn(msg string, data ...any) {
DefaultLogger.Warn(msg, data...)
}
func Error(msg string, data ...any) {
DefaultLogger.Error(msg, data...)
}
func Panic(msg string, data ...any) {
DefaultLogger.Panic(msg, data...)
}
func Fatal(msg string, data ...any) {
DefaultLogger.Fatal(msg, data...)
}
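A quick sketch of the package-level helpers; the messages are placeholders:

package main

import "github.com/loveuer/upp/pkg/log"

func main() {
	log.SetTimeFormat("2006-01-02 15:04:05")
	log.SetLogLevel(log.LogLevelDebug)

	log.Debug("connecting to %s", "127.0.0.1:6379")
	log.Info("service started on %s", ":8080")
	log.Warn("cache miss for key = %s", "user:1")
	log.Error("request failed: %s", "timeout")
}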

115
pkg/log/log.go Normal file
View File

@ -0,0 +1,115 @@
package log
import (
"github.com/fatih/color"
"io"
"sync"
"time"
)
type LogLevel uint32
const (
LogLevelDebug LogLevel = iota
LogLevelInfo
LogLevelWarn
LogLevelError
LogLevelPanic
LogLevelFatal
)
type logger struct {
sync.Mutex
timeFormat string
writer io.Writer
level LogLevel
debug func(prefix, timestamp, msg string, data ...any)
info func(prefix, timestamp, msg string, data ...any)
warn func(prefix, timestamp, msg string, data ...any)
error func(prefix, timestamp, msg string, data ...any)
panic func(prefix, timestamp, msg string, data ...any)
fatal func(prefix, timestamp, msg string, data ...any)
}
var (
red = color.New(color.FgRed)
hired = color.New(color.FgHiRed)
green = color.New(color.FgGreen)
yellow = color.New(color.FgYellow)
white = color.New(color.FgWhite)
)
func (l *logger) SetTimeFormat(format string) {
l.Lock()
defer l.Unlock()
l.timeFormat = format
}
func (l *logger) SetLogLevel(level LogLevel) {
l.Lock()
defer l.Unlock()
if level > LogLevelDebug {
l.debug = nilLogger
} else {
l.debug = normalLogger
}
if level > LogLevelInfo {
l.info = nilLogger
} else {
l.info = normalLogger
}
if level > LogLevelWarn {
l.warn = nilLogger
} else {
l.warn = normalLogger
}
if level > LogLevelError {
l.error = nilLogger
} else {
l.error = normalLogger
}
if level > LogLevelPanic {
l.panic = nilLogger
} else {
l.panic = panicLogger
}
if level > LogLevelFatal {
l.fatal = nilLogger
} else {
l.fatal = fatalLogger
}
}
func (l *logger) Debug(msg string, data ...any) {
l.debug(white.Sprint("Debug "), time.Now().Format(l.timeFormat), msg, data...)
}
func (l *logger) Info(msg string, data ...any) {
l.info(green.Sprint("Info "), time.Now().Format(l.timeFormat), msg, data...)
}
func (l *logger) Warn(msg string, data ...any) {
l.warn(yellow.Sprint("Warn "), time.Now().Format(l.timeFormat), msg, data...)
}
func (l *logger) Error(msg string, data ...any) {
l.error(red.Sprint("Error "), time.Now().Format(l.timeFormat), msg, data...)
}
func (l *logger) Panic(msg string, data ...any) {
l.panic(hired.Sprint("Panic "), time.Now().Format(l.timeFormat), msg, data...)
}
func (l *logger) Fatal(msg string, data ...any) {
l.fatal(hired.Sprint("Fatal "), time.Now().Format(l.timeFormat), msg, data...)
}
type WroteLogger interface {
Info(msg string, data ...any)
}

21
pkg/log/new.go Normal file
View File

@ -0,0 +1,21 @@
package log
import (
"os"
"sync"
)
func New() *logger {
return &logger{
Mutex: sync.Mutex{},
timeFormat: "2006-01-02T15:04:05",
writer: os.Stdout,
level: LogLevelInfo,
debug: nilLogger,
info: normalLogger,
warn: normalLogger,
error: normalLogger,
panic: panicLogger,
fatal: fatalLogger,
}
}

38
pkg/tool/ctx.go Normal file
View File

@ -0,0 +1,38 @@
package tool
import (
"context"
"time"
)
func Timeout(seconds ...int) (ctx context.Context) {
var (
duration time.Duration
)
if len(seconds) > 0 && seconds[0] > 0 {
duration = time.Duration(seconds[0]) * time.Second
} else {
duration = time.Duration(30) * time.Second
}
ctx, _ = context.WithTimeout(context.Background(), duration)
return
}
func TimeoutCtx(ctx context.Context, seconds ...int) context.Context {
var (
duration time.Duration
)
if len(seconds) > 0 && seconds[0] > 0 {
duration = time.Duration(seconds[0]) * time.Second
} else {
duration = time.Duration(30) * time.Second
}
nctx, _ := context.WithTimeout(ctx, duration)
return nctx
}
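Both helpers default to 30 seconds when no value is passed; a small sketch reusing the redis ping pattern from cache/new.go (the address is a placeholder):

package main

import (
	"github.com/go-redis/redis/v8"
	"github.com/loveuer/upp/pkg/log"
	"github.com/loveuer/upp/pkg/tool"
)

func main() {
	rc := redis.NewClient(&redis.Options{Addr: "127.0.0.1:6379"})
	// Timeout(5) expires after 5 seconds; Timeout() would expire after 30.
	if err := rc.Ping(tool.Timeout(5)).Err(); err != nil {
		log.Fatal("redis ping: %s", err.Error())
	}
	log.Info("redis reachable")
}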

24
pkg/tool/human.go Normal file
View File

@ -0,0 +1,24 @@
package tool
import "fmt"
func HumanDuration(nano int64) string {
duration := float64(nano)
unit := "ns"
if duration >= 1000 {
duration /= 1000
unit = "us"
}
if duration >= 1000 {
duration /= 1000
unit = "ms"
}
if duration >= 1000 {
duration /= 1000
unit = " s"
}
return fmt.Sprintf("%6.2f%s", duration, unit)
}
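For example, timing a call and printing the elapsed time in a fixed-width form:

package main

import (
	"fmt"
	"time"

	"github.com/loveuer/upp/pkg/tool"
)

func main() {
	start := time.Now()
	time.Sleep(1500 * time.Microsecond)
	// prints something like "  1.52ms"
	fmt.Println(tool.HumanDuration(time.Since(start).Nanoseconds()))
}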

123
pkg/tool/loading/loading.go Normal file
View File

@ -0,0 +1,123 @@
package loading
import (
"context"
"fmt"
"time"
)
type Type int
const (
TypeProcessing Type = iota
TypeInfo
TypeSuccess
TypeWarning
TypeError
)
func (t Type) Symbol() string {
switch t {
case TypeSuccess:
return "✔️ "
case TypeWarning:
return "❗ "
case TypeError:
return "❌ "
case TypeInfo:
return "❕ "
default:
return ""
}
}
type _msg struct {
msg string
t Type
}
var frames = []string{"|", "/", "-", "\\"}
func Do(ctx context.Context, fn func(ctx context.Context, print func(msg string, types ...Type)) error) (err error) {
start := time.Now()
ch := make(chan *_msg)
defer func() {
fmt.Printf("\r\033[K")
}()
go func() {
var (
m *_msg
ok bool
processing string
)
for {
for _, frame := range frames {
select {
case <-ctx.Done():
return
case m, ok = <-ch:
if !ok || m == nil {
return
}
switch m.t {
case TypeProcessing:
if m.msg != "" {
processing = m.msg
}
case TypeInfo,
TypeSuccess,
TypeWarning,
TypeError:
// Clear the loading animation
fmt.Printf("\r\033[K")
fmt.Printf("%s%s\n", m.t.Symbol(), m.msg)
}
default:
elapsed := time.Since(start).Seconds()
if processing != "" {
fmt.Printf("\r\033[K%s %s (%.2fs)", frame, processing, elapsed)
}
time.Sleep(100 * time.Millisecond)
}
}
}
}()
printFn := func(msg string, types ...Type) {
if msg == "" {
return
}
m := &_msg{
msg: msg,
t: TypeProcessing,
}
if len(types) > 0 {
m.t = types[0]
}
ch <- m
}
done := make(chan struct{})
go func() {
if err = fn(ctx, printFn); err != nil {
ch <- &_msg{msg: err.Error(), t: TypeError}
}
close(ch)
done <- struct{}{}
}()
select {
case <-ctx.Done():
case <-done:
}
return err
}
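A minimal sketch of driving the spinner; the messages and the sleep stand in for real work:

package main

import (
	"context"
	"time"

	"github.com/loveuer/upp/pkg/tool/loading"
)

func main() {
	_ = loading.Do(context.Background(), func(ctx context.Context, print func(msg string, types ...loading.Type)) error {
		print("downloading...")
		time.Sleep(2 * time.Second)
		print("download finished", loading.TypeSuccess)
		return nil
	})
}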

11
pkg/tool/must.go Normal file
View File

@ -0,0 +1,11 @@
package tool
import "github.com/loveuer/upp/pkg/log"
func Must(errs ...error) {
for _, err := range errs {
if err != nil {
log.Panic(err.Error())
}
}
}

85
pkg/tool/password.go Normal file
View File

@ -0,0 +1,85 @@
package tool
import (
"crypto/sha256"
"encoding/hex"
"errors"
"fmt"
"regexp"
"strconv"
"strings"
"github.com/loveuer/upp/pkg/log"
"golang.org/x/crypto/pbkdf2"
)
const (
EncryptHeader string = "pbkdf2:sha256" // prefix for stored user password hashes
)
func NewPassword(password string) string {
return EncryptPassword(password, RandomString(8), int(RandomInt(50000)+100000))
}
func ComparePassword(in, db string) bool {
strs := strings.Split(db, "$")
if len(strs) != 3 {
log.Error("password in db invalid: %s", db)
return false
}
encs := strings.Split(strs[0], ":")
if len(encs) != 3 {
log.Error("password in db invalid: %s", db)
return false
}
encIteration, err := strconv.Atoi(encs[2])
if err != nil {
log.Error("password in db invalid: %s, convert iter err: %s", db, err)
return false
}
return EncryptPassword(in, strs[1], encIteration) == db
}
func EncryptPassword(password, salt string, iter int) string {
hash := pbkdf2.Key([]byte(password), []byte(salt), iter, 32, sha256.New)
encrypted := hex.EncodeToString(hash)
return fmt.Sprintf("%s:%d$%s$%s", EncryptHeader, iter, salt, encrypted)
}
func CheckPassword(password string) error {
if len(password) < 8 || len(password) > 32 {
return errors.New("密码长度不符合")
}
var (
err error
match bool
patternList = []string{`[0-9]+`, `[a-z]+`, `[A-Z]+`, `[!@#%]+`} //, `[~!@#$%^&*?_-]+`}
matchAccount = 0
tips = []string{"缺少数字", "缺少小写字母", "缺少大写字母", "缺少'!@#%'"}
locktips = make([]string, 0)
)
for idx, pattern := range patternList {
match, err = regexp.MatchString(pattern, password)
if err != nil {
log.Warn("regex match string err, reg_str: %s, err: %v", pattern, err)
return errors.New("密码强度不够")
}
if match {
matchAccount++
} else {
locktips = append(locktips, tips[idx])
}
}
if matchAccount < 3 {
return fmt.Errorf("密码强度不够, 可能 %s", strings.Join(locktips, ", "))
}
return nil
}
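The full round trip looks roughly like this; the plaintext password is made up, and CheckPassword requires 8-32 characters drawn from at least three character classes:

package main

import (
	"github.com/loveuer/upp/pkg/log"
	"github.com/loveuer/upp/pkg/tool"
)

func main() {
	plain := "Upp!2345"
	if err := tool.CheckPassword(plain); err != nil {
		log.Fatal("weak password: %s", err.Error())
	}

	stored := tool.NewPassword(plain) // "pbkdf2:sha256:<iter>$<salt>$<hex>"
	log.Info("matched = %v", tool.ComparePassword(plain, stored))
}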

11
pkg/tool/password_test.go Normal file
View File

@ -0,0 +1,11 @@
package tool
import "testing"
func TestEncPassword(t *testing.T) {
password := "123456"
result := EncryptPassword(password, RandomString(8), 50000)
t.Logf("sum => %s", result)
}

54
pkg/tool/random.go Normal file
View File

@ -0,0 +1,54 @@
package tool
import (
"crypto/rand"
"math/big"
)
var (
letters = []byte("0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ")
letterNum = []byte("0123456789")
letterLow = []byte("abcdefghijklmnopqrstuvwxyz")
letterCap = []byte("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
letterSyb = []byte("!@#$%^&*()_+-=")
)
func RandomInt(max int64) int64 {
num, _ := rand.Int(rand.Reader, big.NewInt(max))
return num.Int64()
}
func RandomString(length int) string {
result := make([]byte, length)
for i := 0; i < length; i++ {
num, _ := rand.Int(rand.Reader, big.NewInt(int64(len(letters))))
result[i] = letters[num.Int64()]
}
return string(result)
}
func RandomPassword(length int, withSymbol bool) string {
result := make([]byte, length)
kind := 3
if withSymbol {
kind++
}
for i := 0; i < length; i++ {
switch i % kind {
case 0:
num, _ := rand.Int(rand.Reader, big.NewInt(int64(len(letterNum))))
result[i] = letterNum[num.Int64()]
case 1:
num, _ := rand.Int(rand.Reader, big.NewInt(int64(len(letterLow))))
result[i] = letterLow[num.Int64()]
case 2:
num, _ := rand.Int(rand.Reader, big.NewInt(int64(len(letterCap))))
result[i] = letterCap[num.Int64()]
case 3:
num, _ := rand.Int(rand.Reader, big.NewInt(int64(len(letterSyb))))
result[i] = letterSyb[num.Int64()]
}
}
return string(result)
}
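For example:

package main

import (
	"fmt"

	"github.com/loveuer/upp/pkg/tool"
)

func main() {
	fmt.Println(tool.RandomString(16))         // e.g. a session token
	fmt.Println(tool.RandomPassword(16, true)) // cycles digits, lower, upper and symbols
}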

125
pkg/tool/table.go Normal file
View File

@ -0,0 +1,125 @@
package tool
import (
"encoding/json"
"fmt"
"io"
"os"
"reflect"
"strings"
"github.com/jedib0t/go-pretty/v6/table"
"github.com/loveuer/upp/pkg/log"
)
func TablePrinter(data any, writers ...io.Writer) {
var w io.Writer = os.Stdout
if len(writers) > 0 && writers[0] != nil {
w = writers[0]
}
t := table.NewWriter()
structPrinter(t, "", data)
_, _ = fmt.Fprintln(w, t.Render())
}
func structPrinter(w table.Writer, prefix string, item any) {
Start:
rv := reflect.ValueOf(item)
// a nil interface yields an invalid reflect.Value; guard it before IsZero, which would panic
if !rv.IsValid() || rv.IsZero() {
return
}
for rv.Type().Kind() == reflect.Pointer {
rv = rv.Elem()
}
switch rv.Type().Kind() {
case reflect.Invalid,
reflect.Uintptr,
reflect.Chan,
reflect.Func,
reflect.UnsafePointer:
case reflect.Bool,
reflect.Int,
reflect.Int8,
reflect.Int16,
reflect.Int32,
reflect.Int64,
reflect.Uint,
reflect.Uint8,
reflect.Uint16,
reflect.Uint32,
reflect.Uint64,
reflect.Float32,
reflect.Float64,
reflect.Complex64,
reflect.Complex128,
reflect.Interface:
w.AppendRow(table.Row{strings.TrimPrefix(prefix, "."), rv.Interface()})
case reflect.String:
val := rv.String()
if len(val) <= 160 {
w.AppendRow(table.Row{strings.TrimPrefix(prefix, "."), val})
return
}
w.AppendRow(table.Row{strings.TrimPrefix(prefix, "."), val[0:64] + "..." + val[len(val)-64:]})
case reflect.Array, reflect.Slice:
for i := 0; i < rv.Len(); i++ {
p := strings.Join([]string{prefix, fmt.Sprintf("[%d]", i)}, ".")
structPrinter(w, p, rv.Index(i).Interface())
}
case reflect.Map:
for _, k := range rv.MapKeys() {
structPrinter(w, fmt.Sprintf("%s.{%v}", prefix, k), rv.MapIndex(k).Interface())
}
case reflect.Pointer:
goto Start
case reflect.Struct:
for i := 0; i < rv.NumField(); i++ {
p := fmt.Sprintf("%s.%s", prefix, rv.Type().Field(i).Name)
field := rv.Field(i)
// log.Debug("TablePrinter: prefix: %s, field: %v", p, rv.Field(i))
if !field.CanInterface() {
return
}
structPrinter(w, p, field.Interface())
}
}
}
func TableMapPrinter(data []byte) {
m := make(map[string]any)
if err := json.Unmarshal(data, &m); err != nil {
log.Warn(err.Error())
return
}
t := table.NewWriter()
addRow(t, "", m)
fmt.Println(t.Render())
}
func addRow(w table.Writer, prefix string, m any) {
rv := reflect.ValueOf(m)
switch rv.Type().Kind() {
case reflect.Map:
for _, k := range rv.MapKeys() {
key := k.String()
if prefix != "" {
key = strings.Join([]string{prefix, k.String()}, ".")
}
addRow(w, key, rv.MapIndex(k).Interface())
}
case reflect.Slice, reflect.Array:
for i := 0; i < rv.Len(); i++ {
addRow(w, fmt.Sprintf("%s[%d]", prefix, i), rv.Index(i).Interface())
}
default:
w.AppendRow(table.Row{prefix, m})
}
}
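A quick sketch with a throwaway struct; nested slices and maps are flattened into dotted row keys:

package main

import "github.com/loveuer/upp/pkg/tool"

type server struct {
	Name string
	Port int
	Tags []string
}

func main() {
	tool.TablePrinter(&server{
		Name: "api-1",
		Port: 8080,
		Tags: []string{"prod", "edge"},
	})
}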

19
pkg/tool/tools.go Normal file
View File

@ -0,0 +1,19 @@
package tool
import "cmp"
func Min[T cmp.Ordered](a, b T) T {
if a <= b {
return a
}
return b
}
func Max[T cmp.Ordered](a, b T) T {
if a >= b {
return a
}
return b
}