Image list query
bin/main.go (77 lines changed)
@@ -1,15 +1,19 @@
 package main
 
 import (
+    "bytes"
     "fmt"
     "log"
     "net/http"
     "runtime"
+    "strings"
     "time"
 
     "regexp"
     "strconv"
 
+    "encoding/json"
+
     "git.satori.love/gameui/webp/models"
     _ "github.com/go-sql-driver/mysql"
 )
@@ -35,12 +39,85 @@ func LogComponent(startTime int64, r *http.Request) {
     log.Println(r.Method, r.URL.Path, endTime)
 }
 
+type Image struct {
+    Id         int       `json:"id"`
+    Content    string    `json:"content"`
+    CreateTime time.Time `json:"create_time"`
+    UpdateTime time.Time `json:"update_time"`
+}
+
 func main() {
     runtime.GOMAXPROCS(runtime.NumCPU())
 
     var mysqlConnection models.MysqlConnection
     mysqlConnection.Init()
 
+    // Get the image info list (paginated)
+    http.HandleFunc("/images", func(w http.ResponseWriter, r *http.Request) {
+        defer LogComponent(time.Now().UnixNano(), r) // print the log at the end
+
+        // Private scope: (own images, own articles, own curated collections, liked/bookmarked collections)
+        // Conditional query (fuzzy search, time range, author, tags, categories, collections, status, pinned, fuzzy weight) (weight rule: weight index)
+        // Conditional filtering (intersection, union, difference, subset)
+        // Sorting
+        // Pagination
+
+        // Query by tags
+        var tags string
+        if list := strings.Split(r.URL.Query().Get("tags"), ","); len(list) > 0 {
+            for _, tag := range list {
+                tags += fmt.Sprintf("'%s',", tag)
+            }
+            tags = tags[:len(tags)-1]
+            fmt.Println(tags)
+        }
+
+        // Query by categories
+        var categories string
+        if list := strings.Split(r.URL.Query().Get("categories"), ","); len(list) > 0 {
+            for _, category := range list {
+                categories += fmt.Sprintf("'%s',", category)
+            }
+            categories = categories[:len(categories)-1]
+            fmt.Println(categories)
+        }
+
+        // Get the image list
+        var images []Image
+        page, size := stringToInt(r.URL.Query().Get("page"), 1), stringToInt(r.URL.Query().Get("pageSize"), 10)
+        rows, err := mysqlConnection.Database.Query("SELECT id, content, update_time, create_time FROM web_images LIMIT ?, ?", (page-1)*size, size)
+        if err != nil {
+            log.Println("获取图片列表失败", err)
+            http.Error(w, err.Error(), http.StatusBadRequest)
+            return
+        }
+
+        // Process the result set
+        defer rows.Close()
+        for rows.Next() {
+            var image Image
+            rows.Scan(&image.Id, &image.Content, &image.UpdateTime, &image.CreateTime)
+            image.UpdateTime = image.UpdateTime.UTC()
+            image.CreateTime = image.CreateTime.UTC()
+            image.Content = regexp.MustCompile(`http:`).ReplaceAllString(image.Content, "https:")
+            images = append(images, image)
+        }
+
+        // Marshal the objects to JSON
+        data, err := json.Marshal(images)
+        if err != nil {
+            log.Println("转换图片列表失败", err)
+            http.Error(w, err.Error(), http.StatusBadRequest)
+            return
+        }
+
+        // Format the output as indented JSON
+        var out bytes.Buffer
+        json.Indent(&out, data, "", " ")
+        w.Header().Set("Content-Type", "application/json")
+        w.Write(out.Bytes())
+    })
+
     // URL format: /img/{type}-{id}.{format}?width=320&height=320&fit=cover
     http.HandleFunc("/img/", func(w http.ResponseWriter, r *http.Request) {
         defer LogComponent(time.Now().UnixNano(), r) // print the log at the end
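In the /images handler above, the collected tags and categories are only printed; the conditions sketched in the comments are not yet applied to the query. Note also that strings.Split of an empty parameter returns a one-element slice containing "", so the len(list) > 0 check does not detect a missing parameter. Below is a minimal sketch of how the tag list could be bound as placeholders instead of being concatenated into quoted strings; the join table web_image_tags and its columns are assumptions for illustration, since the commit does not show how tags are stored. Separately, json.MarshalIndent(images, "", " ") would produce the indented response in one call instead of json.Marshal followed by json.Indent.

package main

import (
    "database/sql"
    "strings"
)

// queryImagesByTags is a sketch only: it binds each tag as a query parameter
// rather than splicing quoted values into the SQL. The table web_image_tags
// and its columns are assumptions, not part of the commit.
func queryImagesByTags(db *sql.DB, tags []string, page, size int) (*sql.Rows, error) {
    if len(tags) == 0 {
        // No tag filter: fall back to the plain paginated query used in the handler.
        return db.Query("SELECT id, content, update_time, create_time FROM web_images LIMIT ?, ?", (page-1)*size, size)
    }
    placeholders := strings.TrimRight(strings.Repeat("?,", len(tags)), ",")
    args := make([]interface{}, 0, len(tags)+2)
    for _, tag := range tags {
        args = append(args, tag)
    }
    args = append(args, (page-1)*size, size)
    query := "SELECT i.id, i.content, i.update_time, i.create_time" +
        " FROM web_images i JOIN web_image_tags t ON t.image_id = i.id" +
        " WHERE t.tag IN (" + placeholders + ") LIMIT ?, ?"
    return db.Query(query, args...)
}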
@@ -9,8 +9,13 @@ import (
     _ "github.com/go-sql-driver/mysql"
 )
 
+// Initialize the database connection
+//func (m *MysqlConnectionx) Init() {
+//    fmt.Println("初始化数据库连接")
+//}
+
 type MysqlConnection struct {
-    db *sql.DB
+    Database *sql.DB
 }
 
 // Initialize the database connection
@@ -22,7 +27,7 @@ func (m *MysqlConnection) Init() (err error) {
     port := viper.Get("mysql.port").(int)
     database := viper.Get("mysql.database").(string)
     sqlconf := user + ":" + password + "@tcp(" + host + ":" + strconv.Itoa(port) + ")/" + database + "?charset=utf8mb4&parseTime=True&loc=Local"
-    m.db, err = sql.Open("mysql", sqlconf) // connect to the database
+    m.Database, err = sql.Open("mysql", sqlconf) // connect to the database
     if err != nil {
         log.Println("连接数据库失败", err)
         return
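One detail of Init worth noting: sql.Open only validates the DSN and prepares a connection pool; it does not dial the server, so a wrong host or password surfaces on the first query rather than at startup. The sketch below adds a Ping after Open to fail fast; the function name and error handling are illustrative, not taken from the commit.

package main

import (
    "database/sql"
    "log"

    _ "github.com/go-sql-driver/mysql"
)

// openAndPing is a sketch: Ping forces a real connection right after Open so
// configuration problems are reported at startup instead of on the first query.
func openAndPing(dsn string) (*sql.DB, error) {
    db, err := sql.Open("mysql", dsn)
    if err != nil {
        return nil, err
    }
    if err := db.Ping(); err != nil {
        log.Println("database unreachable:", err)
        return nil, err
    }
    return db, nil
}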
@@ -34,17 +39,43 @@ func (m *MysqlConnection) Init() (err error) {
 func (m *MysqlConnection) GetImageContent(group string, id string) (content string, err error) {
     switch group {
     case "article":
-        err = m.db.QueryRow("SELECT image FROM web_article WHERE id=" + id).Scan(&content)
+        err = m.Database.QueryRow("SELECT image FROM web_article WHERE id=" + id).Scan(&content)
     case "article_attribute":
-        err = m.db.QueryRow("SELECT image FROM web_article_attribute WHERE id=" + id).Scan(&content)
+        err = m.Database.QueryRow("SELECT image FROM web_article_attribute WHERE id=" + id).Scan(&content)
     case "ad":
-        err = m.db.QueryRow("SELECT image FROM web_ad WHERE id=" + id).Scan(&content)
+        err = m.Database.QueryRow("SELECT image FROM web_ad WHERE id=" + id).Scan(&content)
     case "avatar":
-        err = m.db.QueryRow("SELECT avatar FROM web_member WHERE id=" + id).Scan(&content)
+        err = m.Database.QueryRow("SELECT avatar FROM web_member WHERE id=" + id).Scan(&content)
     case "image":
-        err = m.db.QueryRow("SELECT content FROM web_images WHERE id=" + id).Scan(&content)
+        err = m.Database.QueryRow("SELECT content FROM web_images WHERE id=" + id).Scan(&content)
     default:
         err = errors.New("group 参数错误")
     }
     return content, err
 }
+
+// Get the image list
+func (m *MysqlConnection) GetImages(page int, size int) (images []byte, err error) {
+    rows, err := m.Database.Query("SELECT id, group, content FROM web_images LIMIT ?, ?", (page-1)*size, size)
+    if err != nil {
+        log.Println("获取图片列表失败", err)
+        return
+    }
+    defer rows.Close()
+
+    images = []byte("[")
+    for rows.Next() {
+        var id int
+        var group string
+        var content string
+        err = rows.Scan(&id, &group, &content)
+        if err != nil {
+            log.Println("获取图片列表失败", err)
+            return
+        }
+        images = append(images, []byte("{\"id\":"+strconv.Itoa(id)+",\"group\":\""+group+"\",\"content\":\""+content+"\"},")...)
+    }
+    images = images[:len(images)-1]
+    images = append(images, []byte("]")...)
+    return
+}
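GetImages assembles the JSON by hand, which produces invalid output if a content value contains a double quote or backslash, and when the result set is empty the images[:len(images)-1] slice removes the opening bracket so only "]" is returned. Note also that group is a reserved word in MySQL, so the unquoted column in the SELECT would need backquotes. A sketch of the same query returning JSON via encoding/json follows; the struct, the function name, and the backquoted group column are assumptions for illustration.

package main

import (
    "database/sql"
    "encoding/json"
    "log"
)

// imageRow mirrors the three columns selected by GetImages; the field names
// are an assumption for this sketch.
type imageRow struct {
    Id      int    `json:"id"`
    Group   string `json:"group"`
    Content string `json:"content"`
}

// getImagesJSON is a sketch: it scans into a struct slice and lets
// encoding/json handle quoting and escaping, and it returns "[]" rather than
// a bare "]" when there are no rows. The backquoted `group` column assumes
// the schema really uses that reserved word as a column name.
func getImagesJSON(db *sql.DB, page, size int) ([]byte, error) {
    rows, err := db.Query("SELECT id, `group`, content FROM web_images LIMIT ?, ?", (page-1)*size, size)
    if err != nil {
        log.Println("query web_images failed:", err)
        return nil, err
    }
    defer rows.Close()

    images := make([]imageRow, 0)
    for rows.Next() {
        var img imageRow
        if err := rows.Scan(&img.Id, &img.Group, &img.Content); err != nil {
            return nil, err
        }
        images = append(images, img)
    }
    if err := rows.Err(); err != nil {
        return nil, err
    }
    return json.Marshal(images)
}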