replace zxq.co/ripple/hanayo
vendor/github.com/osuripple/cheesegull/models/beatmap.go (generated, vendored, normal file, 116 lines)
@@ -0,0 +1,116 @@
package models

import "database/sql"

// Beatmap represents a single beatmap (difficulty) on osu!.
type Beatmap struct {
	ID               int `json:"BeatmapID"`
	ParentSetID      int
	DiffName         string
	FileMD5          string
	Mode             int
	BPM              float64
	AR               float32
	OD               float32
	CS               float32
	HP               float32
	TotalLength      int
	HitLength        int
	Playcount        int
	Passcount        int
	MaxCombo         int
	DifficultyRating float64
}

const beatmapFields = `
id, parent_set_id, diff_name, file_md5, mode, bpm,
ar, od, cs, hp, total_length, hit_length,
playcount, passcount, max_combo, difficulty_rating`

// readBeatmapsFromRows scans all rows into a []Beatmap, preallocating the
// slice with the given capacity.
func readBeatmapsFromRows(rows *sql.Rows, capacity int) ([]Beatmap, error) {
	var err error
	bms := make([]Beatmap, 0, capacity)
	for rows.Next() {
		var b Beatmap
		err = rows.Scan(
			&b.ID, &b.ParentSetID, &b.DiffName, &b.FileMD5, &b.Mode, &b.BPM,
			&b.AR, &b.OD, &b.CS, &b.HP, &b.TotalLength, &b.HitLength,
			&b.Playcount, &b.Passcount, &b.MaxCombo, &b.DifficultyRating,
		)
		if err != nil {
			return nil, err
		}
		bms = append(bms, b)
	}

	return bms, rows.Err()
}

// inClause generates a "?, ?, ?" placeholder list with the given number of
// elements, for use inside an SQL IN (...) clause.
func inClause(length int) string {
	if length <= 0 {
		return ""
	}
	b := make([]byte, length*3-2)
	for i := 0; i < length; i++ {
		b[i*3] = '?'
		if i != length-1 {
			b[i*3+1] = ','
			b[i*3+2] = ' '
		}
	}
	return string(b)
}

// sIntToSInterface converts a slice of ints into a slice of empty interfaces,
// as required by the variadic arguments of the database/sql query methods.
func sIntToSInterface(i []int) []interface{} {
	args := make([]interface{}, len(i))
	for idx, id := range i {
		args[idx] = id
	}
	return args
}

// FetchBeatmaps retrieves a list of beatmaps given their IDs.
func FetchBeatmaps(db *sql.DB, ids ...int) ([]Beatmap, error) {
	if len(ids) == 0 {
		return nil, nil
	}

	q := `SELECT ` + beatmapFields + ` FROM beatmaps WHERE id IN (` + inClause(len(ids)) + `)`

	rows, err := db.Query(q, sIntToSInterface(ids)...)
	if err != nil {
		return nil, err
	}

	return readBeatmapsFromRows(rows, len(ids))
}

// CreateBeatmaps adds beatmaps to the database.
func CreateBeatmaps(db *sql.DB, bms ...Beatmap) error {
	if len(bms) == 0 {
		return nil
	}

	q := `INSERT INTO beatmaps(` + beatmapFields + `) VALUES `
	const valuePlaceholder = `(
	?, ?, ?, ?, ?, ?,
	?, ?, ?, ?, ?, ?,
	?, ?, ?, ?
)`

	args := make([]interface{}, 0, len(bms)*16)
	for idx, bm := range bms {
		if idx != 0 {
			q += ", "
		}
		q += valuePlaceholder
		args = append(args,
			bm.ID, bm.ParentSetID, bm.DiffName, bm.FileMD5, bm.Mode, bm.BPM,
			bm.AR, bm.OD, bm.CS, bm.HP, bm.TotalLength, bm.HitLength,
			bm.Playcount, bm.Passcount, bm.MaxCombo, bm.DifficultyRating,
		)
	}

	_, err := db.Exec(q, args...)
	return err
}
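For orientation, here is a minimal sketch of how the exported helpers above might be called from application code. The MySQL driver import and the DSN are assumptions for illustration; they are not part of this commit.

package main

import (
	"database/sql"
	"fmt"
	"log"

	_ "github.com/go-sql-driver/mysql" // assumed driver; the vendored code only depends on database/sql
	"github.com/osuripple/cheesegull/models"
)

func main() {
	// Placeholder DSN: adjust user, password and schema to your setup.
	db, err := sql.Open("mysql", "user:password@/cheesegull?parseTime=true")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// FetchBeatmaps expands the IDs into an "IN (?, ?)" clause via inClause
	// and scans each row into a Beatmap.
	bms, err := models.FetchBeatmaps(db, 75, 129891)
	if err != nil {
		log.Fatal(err)
	}
	for _, b := range bms {
		fmt.Println(b.ID, b.DiffName, b.DifficultyRating)
	}
}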
vendor/github.com/osuripple/cheesegull/models/migrations.go (generated, vendored, normal file, 52 lines)
@@ -0,0 +1,52 @@
// THIS FILE HAS BEEN AUTOMATICALLY GENERATED
// To re-generate it, run "go generate" in the models folder.

package models

var migrations = [...]string{
	`CREATE TABLE sets(
	id INT NOT NULL,
	ranked_status TINYINT NOT NULL,
	approved_date DATETIME NOT NULL,
	last_update DATETIME NOT NULL,
	last_checked DATETIME NOT NULL,
	artist VARCHAR(1000) NOT NULL,
	title VARCHAR(1000) NOT NULL,
	creator VARCHAR(1000) NOT NULL,
	source VARCHAR(1000) NOT NULL,
	tags VARCHAR(1000) NOT NULL,
	has_video TINYINT NOT NULL,
	genre TINYINT NOT NULL,
	language TINYINT NOT NULL,
	favourites INT NOT NULL,
	set_modes TINYINT NOT NULL,
	PRIMARY KEY(id)
);
`,
	`CREATE TABLE beatmaps(
	id INT NOT NULL,
	parent_set_id INT NOT NULL,
	diff_name VARCHAR(1000) NOT NULL,
	file_md5 CHAR(32) NOT NULL,
	mode INT NOT NULL,
	bpm DECIMAL(10, 4) NOT NULL,
	ar DECIMAL(4, 2) NOT NULL,
	od DECIMAL(4, 2) NOT NULL,
	cs DECIMAL(4, 2) NOT NULL,
	hp DECIMAL(4, 2) NOT NULL,
	total_length INT NOT NULL,
	hit_length INT NOT NULL,
	playcount INT NOT NULL,
	passcount INT NOT NULL,
	max_combo INT NOT NULL,
	difficulty_rating INT NOT NULL,
	PRIMARY KEY(id),
	FOREIGN KEY (parent_set_id) REFERENCES sets(id)
		ON DELETE CASCADE
		ON UPDATE CASCADE
);`,
	`ALTER TABLE sets ADD FULLTEXT(artist, title, creator, source, tags);`,
	`ALTER TABLE beatmaps MODIFY difficulty_rating DECIMAL(20, 15);
`,
	`ALTER TABLE sets DROP INDEX artist;`,
}
vendor/github.com/osuripple/cheesegull/models/migrations/0001.sql (generated, vendored, normal file, 18 lines)
@@ -0,0 +1,18 @@
CREATE TABLE sets(
	id INT NOT NULL,
	ranked_status TINYINT NOT NULL,
	approved_date DATETIME NOT NULL,
	last_update DATETIME NOT NULL,
	last_checked DATETIME NOT NULL,
	artist VARCHAR(1000) NOT NULL,
	title VARCHAR(1000) NOT NULL,
	creator VARCHAR(1000) NOT NULL,
	source VARCHAR(1000) NOT NULL,
	tags VARCHAR(1000) NOT NULL,
	has_video TINYINT NOT NULL,
	genre TINYINT NOT NULL,
	language TINYINT NOT NULL,
	favourites INT NOT NULL,
	set_modes TINYINT NOT NULL,
	PRIMARY KEY(id)
);
vendor/github.com/osuripple/cheesegull/models/migrations/0002.sql (generated, vendored, normal file, 22 lines)
@@ -0,0 +1,22 @@
CREATE TABLE beatmaps(
	id INT NOT NULL,
	parent_set_id INT NOT NULL,
	diff_name VARCHAR(1000) NOT NULL,
	file_md5 CHAR(32) NOT NULL,
	mode INT NOT NULL,
	bpm DECIMAL(10, 4) NOT NULL,
	ar DECIMAL(4, 2) NOT NULL,
	od DECIMAL(4, 2) NOT NULL,
	cs DECIMAL(4, 2) NOT NULL,
	hp DECIMAL(4, 2) NOT NULL,
	total_length INT NOT NULL,
	hit_length INT NOT NULL,
	playcount INT NOT NULL,
	passcount INT NOT NULL,
	max_combo INT NOT NULL,
	difficulty_rating INT NOT NULL,
	PRIMARY KEY(id),
	FOREIGN KEY (parent_set_id) REFERENCES sets(id)
		ON DELETE CASCADE
		ON UPDATE CASCADE
);
vendor/github.com/osuripple/cheesegull/models/migrations/0003.sql (generated, vendored, normal file, 1 line)
@@ -0,0 +1 @@
ALTER TABLE sets ADD FULLTEXT(artist, title, creator, source, tags);
vendor/github.com/osuripple/cheesegull/models/migrations/0004.sql (generated, vendored, normal file, 1 line)
@@ -0,0 +1 @@
ALTER TABLE beatmaps MODIFY difficulty_rating DECIMAL(20, 15);
vendor/github.com/osuripple/cheesegull/models/migrations/0005.sql (generated, vendored, normal file, 1 line)
@@ -0,0 +1 @@
ALTER TABLE sets DROP INDEX artist;
vendor/github.com/osuripple/cheesegull/models/migrations_gen.go (generated, vendored, normal file, 60 lines)
@@ -0,0 +1,60 @@
// +build ignore

package main

import (
	"fmt"
	"io"
	"io/ioutil"
	"os"
	"strings"
)

const fileHeader = `// THIS FILE HAS BEEN AUTOMATICALLY GENERATED
// To re-generate it, run "go generate" in the models folder.

package models

var migrations = [...]string{
`

func main() {
	// ReadDir gets all the files in the directory and then sorts them
	// alphabetically - thus we can be sure 0001 will come first and 0002 will
	// come afterwards.
	files, err := ioutil.ReadDir("migrations")
	check(err)

	out, err := os.Create("migrations.go")
	check(err)

	_, err = out.WriteString(fileHeader)
	check(err)

	for _, file := range files {
		if !strings.HasSuffix(file.Name(), ".sql") || file.IsDir() {
			continue
		}
		f, err := os.Open("migrations/" + file.Name())
		check(err)

		out.WriteString("\t`")
		_, err = io.Copy(out, f)
		check(err)
		out.WriteString("`,\n")

		f.Close()
	}

	_, err = out.WriteString("}\n")
	check(err)

	check(out.Close())
}

// check prints err to standard error and exits if it is non-nil.
func check(err error) {
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}
vendor/github.com/osuripple/cheesegull/models/models.go (generated, vendored, normal file, 51 lines)
@@ -0,0 +1,51 @@
// Package models contains everything that is needed to interface to the
// database CheeseGull is using.
package models

import (
	"database/sql"
)

//go:generate go run migrations_gen.go

// RunMigrations brings the database up to date following the migrations.
func RunMigrations(db *sql.DB) error {
	var version int
	var _b []byte
	err := db.QueryRow("SHOW TABLES LIKE 'db_version'").Scan(&_b)
	switch err {
	case nil:
		// fetch version from db
		err = db.QueryRow("SELECT version FROM db_version").Scan(&version)
		if err != nil {
			return err
		}
	case sql.ErrNoRows:
		_, err = db.Exec("CREATE TABLE db_version(version INT NOT NULL)")
		if err != nil {
			return err
		}
		_, err = db.Exec("INSERT INTO db_version(version) VALUES ('-1')")
		if err != nil {
			return err
		}
		version = -1
	default:
		return err
	}

	for {
		version++
		if version >= len(migrations) {
			version--
			db.Exec("UPDATE db_version SET version = ?", version)
			return nil
		}

		s := migrations[version]
		_, err = db.Exec(s)
		if err != nil {
			return err
		}
	}
}
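A minimal sketch of wiring RunMigrations into program startup; the driver import and DSN below are assumptions for illustration, not part of the vendored code.

package main

import (
	"database/sql"
	"log"

	_ "github.com/go-sql-driver/mysql" // assumed driver
	"github.com/osuripple/cheesegull/models"
)

func main() {
	// Placeholder DSN. On a fresh database RunMigrations creates db_version,
	// then applies every entry of the migrations array newer than the stored
	// version, and finally records the new version.
	db, err := sql.Open("mysql", "user:password@/cheesegull")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	if err := models.RunMigrations(db); err != nil {
		log.Fatal(err)
	}
}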
vendor/github.com/osuripple/cheesegull/models/set.go (generated, vendored, normal file, 160 lines)
@@ -0,0 +1,160 @@
package models

import (
	"database/sql"
	"time"
)

// Set represents a set of beatmaps usually sharing the same song.
type Set struct {
	ID               int `json:"SetID"`
	ChildrenBeatmaps []Beatmap
	RankedStatus     int
	ApprovedDate     time.Time
	LastUpdate       time.Time
	LastChecked      time.Time
	Artist           string
	Title            string
	Creator          string
	Source           string
	Tags             string
	HasVideo         bool
	Genre            int
	Language         int
	Favourites       int
}

const setFields = `id, ranked_status, approved_date, last_update, last_checked,
artist, title, creator, source, tags, has_video, genre,
language, favourites`

// FetchSetsForBatchUpdate fetches limit sets from the database, sorted by
// LastChecked (asc, older first). Results are further filtered: if the set's
// RankedStatus is 3, 0 or -1 (qualified, pending or WIP), at least 30 minutes
// must have passed since LastChecked. For all other statuses, at least 4 days
// must have passed since LastChecked.
func FetchSetsForBatchUpdate(db *sql.DB, limit int) ([]Set, error) {
	n := time.Now()
	rows, err := db.Query(`
SELECT `+setFields+` FROM sets
WHERE (ranked_status IN (3, 0, -1) AND last_checked <= ?) OR last_checked <= ?
ORDER BY last_checked ASC
LIMIT ?`,
		n.Add(-time.Minute*30),
		n.Add(-time.Hour*24*4),
		limit,
	)
	if err != nil {
		return nil, err
	}

	sets := make([]Set, 0, limit)
	for rows.Next() {
		var s Set
		err = rows.Scan(
			&s.ID, &s.RankedStatus, &s.ApprovedDate, &s.LastUpdate, &s.LastChecked,
			&s.Artist, &s.Title, &s.Creator, &s.Source, &s.Tags, &s.HasVideo, &s.Genre,
			&s.Language, &s.Favourites,
		)
		if err != nil {
			return nil, err
		}
		sets = append(sets, s)
	}

	return sets, rows.Err()
}

// FetchSet retrieves a single set to show, alongside its children beatmaps.
func FetchSet(db *sql.DB, id int, withChildren bool) (*Set, error) {
	var s Set
	err := db.QueryRow(`SELECT `+setFields+` FROM sets WHERE id = ? LIMIT 1`, id).Scan(
		&s.ID, &s.RankedStatus, &s.ApprovedDate, &s.LastUpdate, &s.LastChecked,
		&s.Artist, &s.Title, &s.Creator, &s.Source, &s.Tags, &s.HasVideo, &s.Genre,
		&s.Language, &s.Favourites,
	)
	switch err {
	case nil:
		break // carry on
	case sql.ErrNoRows:
		// silently ignore no rows, and just don't return anything
		return nil, nil
	default:
		return nil, err
	}

	if !withChildren {
		return &s, nil
	}

	rows, err := db.Query(`SELECT `+beatmapFields+` FROM beatmaps WHERE parent_set_id = ?`, s.ID)
	if err != nil {
		return nil, err
	}
	s.ChildrenBeatmaps, err = readBeatmapsFromRows(rows, 8)
	return &s, err
}

// DeleteSet deletes a set from the database, also removing its children
// beatmaps.
func DeleteSet(db *sql.DB, set int) error {
	_, err := db.Exec("DELETE FROM beatmaps WHERE parent_set_id = ?", set)
	if err != nil {
		return err
	}
	_, err = db.Exec("DELETE FROM sets WHERE id = ?", set)
	return err
}

// createSetModes will generate the correct value for setModes, which is
// basically a bitwise enum containing the modes that are on a certain set.
func createSetModes(bms []Beatmap) (setModes uint8) {
	for _, bm := range bms {
		m := bm.Mode
		if m < 0 || m >= 4 {
			continue
		}
		setModes |= 1 << uint(m)
	}
	return setModes
}

// CreateSet creates (and updates) a beatmap set in the database.
func CreateSet(db *sql.DB, s Set) error {
	// Delete the existing set, if any.
	// This is mostly a lazy way to make sure updates work as well.
	err := DeleteSet(db, s.ID)
	if err != nil {
		return err
	}

	_, err = db.Exec(`
INSERT INTO sets(
	id, ranked_status, approved_date, last_update, last_checked,
	artist, title, creator, source, tags, has_video, genre,
	language, favourites, set_modes
)
VALUES (
	?, ?, ?, ?, ?,
	?, ?, ?, ?, ?, ?, ?,
	?, ?, ?
)`, s.ID, s.RankedStatus, s.ApprovedDate, s.LastUpdate, s.LastChecked,
		s.Artist, s.Title, s.Creator, s.Source, s.Tags, s.HasVideo, s.Genre,
		s.Language, s.Favourites, createSetModes(s.ChildrenBeatmaps))
	if err != nil {
		return err
	}

	return CreateBeatmaps(db, s.ChildrenBeatmaps...)
}

// BiggestSetID retrieves the biggest set ID in the sets database. This is used
// by discovery to have a starting point from which to discover new beatmaps.
func BiggestSetID(db *sql.DB) (int, error) {
	var i int
	err := db.QueryRow("SELECT id FROM sets ORDER BY id DESC LIMIT 1").Scan(&i)
	if err == sql.ErrNoRows {
		return 0, nil
	}
	return i, err
}
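A short usage sketch for FetchSet. The driver import and DSN are assumptions as in the earlier examples; parseTime=true matters here because Set's date fields are scanned into time.Time.

package main

import (
	"database/sql"
	"fmt"
	"log"

	_ "github.com/go-sql-driver/mysql" // assumed driver
	"github.com/osuripple/cheesegull/models"
)

func main() {
	// Placeholder DSN; parseTime=true lets DATETIME columns scan into time.Time.
	db, err := sql.Open("mysql", "user:password@/cheesegull?parseTime=true")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// true: also load ChildrenBeatmaps for the set.
	set, err := models.FetchSet(db, 1, true)
	if err != nil {
		log.Fatal(err)
	}
	if set == nil {
		// FetchSet returns (nil, nil) when the set does not exist.
		fmt.Println("set not found")
		return
	}
	fmt.Printf("%s - %s by %s (%d difficulties)\n",
		set.Artist, set.Title, set.Creator, len(set.ChildrenBeatmaps))
}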
vendor/github.com/osuripple/cheesegull/models/set_search.go (generated, vendored, normal file, 158 lines)
@@ -0,0 +1,158 @@
package models

import (
	"bytes"
	"database/sql"
	"fmt"
	"strconv"
	"strings"
)

// SearchOptions are options that can be passed to SearchSets for filtering
// sets.
type SearchOptions struct {
	// If len is 0, then it should be treated as if all statuses are good.
	Status []int
	Query  string
	// Gamemodes to which to limit the results. If len is 0, it means all modes
	// are ok.
	Mode []int

	// Pagination options.
	Offset int
	Amount int
}

// setModes folds the requested modes into a bitmask, one bit per game mode.
func (o SearchOptions) setModes() (total uint8) {
	for _, m := range o.Mode {
		if m < 0 || m >= 4 {
			continue
		}
		total |= 1 << uint8(m)
	}
	return
}

var mysqlStringReplacer = strings.NewReplacer(
	`\`, `\\`,
	`"`, `\"`,
	`'`, `\'`,
	"\x00", `\0`,
	"\n", `\n`,
	"\r", `\r`,
	"\x1a", `\Z`,
)

// sIntCommaSeparated renders a slice of ints as "1, 2, 3".
func sIntCommaSeparated(nums []int) string {
	b := bytes.Buffer{}
	for idx, num := range nums {
		b.WriteString(strconv.Itoa(num))
		if idx != len(nums)-1 {
			b.WriteString(", ")
		}
	}
	return b.String()
}

// SearchSets retrieves sets, filtering them using SearchOptions.
func SearchSets(db, searchDB *sql.DB, opts SearchOptions) ([]Set, error) {
	sm := strconv.Itoa(int(opts.setModes()))
	setIDsQuery := "SELECT id, set_modes & " + sm + " AS valid_set_modes FROM cg WHERE "

	// add filters to query
	// Yes. I know. Prepared statements. But Sphinx doesn't like them, so
	// bummer.
	setIDsQuery += "MATCH('" + mysqlStringReplacer.Replace(opts.Query) + "') "
	if len(opts.Status) != 0 {
		setIDsQuery += "AND ranked_status IN (" + sIntCommaSeparated(opts.Status) + ") "
	}
	if len(opts.Mode) != 0 {
		// This is a hack. Apparently, Sphinx does not support AND bitwise
		// operations in the WHERE clause, so we're placing that in the SELECT
		// clause and only making sure it's correct in this place.
		setIDsQuery += "AND valid_set_modes = " + sm + " "
	}

	// set limit
	setIDsQuery += fmt.Sprintf("ORDER BY WEIGHT() DESC, id DESC LIMIT %d, %d OPTION ranker=sph04", opts.Offset, opts.Amount)

	// fetch rows
	rows, err := searchDB.Query(setIDsQuery)
	if err != nil {
		return nil, err
	}

	// From these rows we retrieve the IDs of all our sets.
	// We also pre-create the slice containing the sets, which we will fill
	// later on when we fetch the actual data.
	setIDs := make([]int, 0, opts.Amount)
	sets := make([]Set, 0, opts.Amount)
	// setMap, given an ID, points to the position of a set contained in sets.
	setMap := make(map[int]int, opts.Amount)
	for rows.Next() {
		var id int
		err = rows.Scan(&id, new(int))
		if err != nil {
			return nil, err
		}
		setIDs = append(setIDs, id)
		sets = append(sets, Set{})
		setMap[id] = len(sets) - 1
	}

	// short circuit: there are no sets
	if len(sets) == 0 {
		return []Set{}, nil
	}

	setsQuery := "SELECT " + setFields + " FROM sets WHERE id IN (" + inClause(len(setIDs)) + ")"
	args := sIntToSInterface(setIDs)

	rows, err = db.Query(setsQuery, args...)
	if err != nil {
		return nil, err
	}

	// fetch all the sets, but leave their children beatmaps aside for the moment.
	for rows.Next() {
		var s Set
		err = rows.Scan(
			&s.ID, &s.RankedStatus, &s.ApprovedDate, &s.LastUpdate, &s.LastChecked,
			&s.Artist, &s.Title, &s.Creator, &s.Source, &s.Tags, &s.HasVideo, &s.Genre,
			&s.Language, &s.Favourites,
		)
		if err != nil {
			return nil, err
		}
		sets[setMap[s.ID]] = s
	}

	rows, err = db.Query(
		"SELECT "+beatmapFields+" FROM beatmaps WHERE parent_set_id IN ("+
			inClause(len(setIDs))+")",
		sIntToSInterface(setIDs)...,
	)
	if err != nil {
		return nil, err
	}

	for rows.Next() {
		var b Beatmap
		err = rows.Scan(
			&b.ID, &b.ParentSetID, &b.DiffName, &b.FileMD5, &b.Mode, &b.BPM,
			&b.AR, &b.OD, &b.CS, &b.HP, &b.TotalLength, &b.HitLength,
			&b.Playcount, &b.Passcount, &b.MaxCombo, &b.DifficultyRating,
		)
		if err != nil {
			return nil, err
		}
		parentSet, ok := setMap[b.ParentSetID]
		if !ok {
			continue
		}
		sets[parentSet].ChildrenBeatmaps = append(sets[parentSet].ChildrenBeatmaps, b)
	}

	return sets, nil
}
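Finally, a hedged sketch of calling SearchSets. It takes two connections: the MySQL database holding the sets and beatmaps tables, and a SphinxQL connection serving the cg full-text index. The DSNs, the Sphinx listener address and the example status/mode values are assumptions for illustration only.

package main

import (
	"database/sql"
	"fmt"
	"log"

	_ "github.com/go-sql-driver/mysql" // assumed driver; SphinxQL speaks the MySQL protocol
	"github.com/osuripple/cheesegull/models"
)

func main() {
	db, err := sql.Open("mysql", "user:password@/cheesegull?parseTime=true")
	if err != nil {
		log.Fatal(err)
	}
	// Assumed SphinxQL listener; 9306 is Sphinx's default SphinxQL port.
	searchDB, err := sql.Open("mysql", "tcp(127.0.0.1:9306)/")
	if err != nil {
		log.Fatal(err)
	}

	sets, err := models.SearchSets(db, searchDB, models.SearchOptions{
		Query:  "freedom dive",
		Status: []int{1, 2}, // assumed example: ranked and approved statuses
		Mode:   []int{0},    // assumed example: osu! standard only
		Offset: 0,
		Amount: 10,
	})
	if err != nil {
		log.Fatal(err)
	}
	for _, s := range sets {
		fmt.Println(s.ID, s.Artist, "-", s.Title)
	}
}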