Mirror of https://git.macaw.me/skunky/SkunkyArt.git
Preparing the v1.3.1 release
This commit is contained in:
parent e02174cb71
commit 2dfeaae772
17 changed files with 357 additions and 218 deletions
@@ -3,14 +3,18 @@ package app
import (
	"encoding/json"
	"os"
	"regexp"
	"strconv"
	"time"

	"git.macaw.me/skunky/devianter"
)

type cache_config struct {
	Enabled bool
	Path string
	MaxSize int64 `json:"max-size"`
	Lifetime int64
	Lifetime string
	UpdateInterval int64 `json:"update-interval"`
}
@@ -20,6 +24,7 @@ type config struct {
	BasePath string `json:"base-path"`
	Cache cache_config
	Proxy, Nsfw bool
	UserAgent string `json:"user-agent"`
	DownloadProxy string `json:"download-proxy"`
	Dirs []string `json:"dirs-to-memory"`
}
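Editor's note: the json tags in this hunk imply a kebab-case config file. Below is a small, self-contained sketch of what such a file could look like and how it maps onto the struct; the local type copies and every value are illustrative assumptions, not the project's documented defaults.

package main

import (
	"encoding/json"
	"fmt"
)

// Illustrative copies of the structs from the hunk above; only the json tags
// matter here. Anything not shown in the diff is an assumption.
type cacheConfig struct {
	Enabled        bool
	Path           string
	MaxSize        int64 `json:"max-size"`
	Lifetime       string
	UpdateInterval int64 `json:"update-interval"`
}

type config struct {
	BasePath      string `json:"base-path"`
	Cache         cacheConfig
	Proxy, Nsfw   bool
	UserAgent     string   `json:"user-agent"`
	DownloadProxy string   `json:"download-proxy"`
	Dirs          []string `json:"dirs-to-memory"`
}

func main() {
	// A hypothetical config.json matching the tags above.
	raw := []byte(`{
		"base-path": "/",
		"proxy": true,
		"nsfw": true,
		"user-agent": "Mozilla/5.0 (example)",
		"dirs-to-memory": ["html", "css"],
		"cache": {"enabled": true, "path": "cache", "max-size": 104857600, "lifetime": "2w", "update-interval": 1}
	}`)

	var c config
	if err := json.Unmarshal(raw, &c); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", c)
}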
@@ -33,31 +38,36 @@ var CFG = config{
		Path: "cache",
		UpdateInterval: 1,
	},
	Dirs: []string{"html", "css"},
	Proxy: true,
	Nsfw: true,
	Dirs: []string{"html", "css"},
	UserAgent: "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/127.0.0.0 Safari/537.36",
	Proxy: true,
	Nsfw: true,
}

var lifetimeParsed int64

func ExecuteConfig() {
	go func() {
		defer func() {
			if r := recover(); r != nil {
				recover()
			}
		}()
		for {
			Templates["instances.json"] = string(Download("https://git.macaw.me/skunky/SkunkyArt/raw/branch/master/instances.json").Body)
			time.Sleep(1 * time.Second)
			func() {
				defer func() {
					if r := recover(); r != nil {
						recover()
					}
				}()
				Templates["instances.json"] = string(Download("https://git.macaw.me/skunky/SkunkyArt/raw/branch/master/instances.json").Body)
			}()
			time.Sleep(1 * time.Hour)
		}
	}()

	const helpmsg = `SkunkyArt v1.3 [refactoring]
	const helpmsg = `SkunkyArt v1.3.1 [CSS improvements for mobile and strips on Daily Deviations]

Usage:
- [-c|--config] - path to config
- [-h|--help] - returns this message

Example:
./skunkyart -c config.json

Copyright lost+skunk, X11. https://git.macaw.me/skunky/skunkyart/src/tag/v1.3`
Copyright lost+skunk, X11. https://git.macaw.me/skunky/skunkyart/src/tag/v1.3.1`

	a := os.Args
	for n, x := range a {
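Editor's note on the loop above: the change moves the instances.json download from a one-second cycle to an hourly one and wraps each fetch in its own deferred recover, so a single failed download cannot kill the background goroutine. A minimal, self-contained sketch of that pattern follows; the fetch and store functions are stand-ins, not the project's API.

package main

import (
	"fmt"
	"time"
)

// refreshEvery runs fetch in a loop, recovering from panics per iteration so
// one failure does not stop future refreshes. Download and Templates from the
// diff are replaced by stand-ins here.
func refreshEvery(interval time.Duration, fetch func() string, store func(string)) {
	go func() {
		for {
			func() {
				defer func() {
					if r := recover(); r != nil {
						fmt.Println("refresh failed:", r)
					}
				}()
				store(fetch())
			}()
			time.Sleep(interval)
		}
	}()
}

func main() {
	done := make(chan struct{})
	refreshEvery(time.Hour, func() string { return `{"instances": []}` }, func(s string) {
		fmt.Println("stored:", s)
		close(done)
	})
	<-done
}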
@@ -75,15 +85,42 @@ Copyright lost+skunk, X11. https://git.macaw.me/skunky/skunkyart/src/tag/v1.3`

	if CFG.cfg != "" {
		f, err := os.ReadFile(CFG.cfg)
		try_with_exitstatus(err, 1)
		tryWithExitStatus(err, 1)

		try_with_exitstatus(json.Unmarshal(f, &CFG), 1)
		tryWithExitStatus(json.Unmarshal(f, &CFG), 1)
		if CFG.Cache.Enabled && !CFG.Proxy {
			exit("Incompatible settings detected: cannot use caching media content without proxy", 1)
		}

		if CFG.Cache.MaxSize != 0 || CFG.Cache.Lifetime != 0 {
		if CFG.Cache.Enabled {
			if CFG.Cache.Lifetime != "" {
				var duration int64
				day := 24 * time.Hour.Milliseconds()
				numstr := regexp.MustCompile("[0-9]+").FindAllString(CFG.Cache.Lifetime, -1)
				num, _ := strconv.Atoi(numstr[len(numstr)-1])

				switch unit := CFG.Cache.Lifetime[len(CFG.Cache.Lifetime)-1:]; unit {
				case "i":
					duration = time.Minute.Milliseconds()
				case "h":
					duration = time.Hour.Milliseconds()
				case "d":
					duration = day
				case "w":
					duration = day * 7
				case "m":
					duration = day * 30
				case "y":
					duration = day * 360
				default:
					exit("Invalid unit specified: "+unit, 1)
				}

				lifetimeParsed = duration * int64(num)
			}
			CFG.Cache.MaxSize /= 1024 ^ 2
			go InitCacheSystem()
		}
		devianter.UserAgent = CFG.UserAgent
	}
}
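For reference, the lifetime string introduced above ("i" minutes, "h" hours, "d" days, "w" weeks, "m" months, "y" years) reduces to milliseconds roughly as in this standalone sketch. The helper name and error handling are mine; the unit table is taken from the switch in the hunk.

package main

import (
	"fmt"
	"regexp"
	"strconv"
	"time"
)

// parseLifetime turns a value such as "2w" or "30d" into milliseconds,
// mirroring the unit handling in ExecuteConfig above.
func parseLifetime(lifetime string) (int64, error) {
	day := 24 * time.Hour.Milliseconds()

	nums := regexp.MustCompile("[0-9]+").FindAllString(lifetime, -1)
	if len(nums) == 0 {
		return 0, fmt.Errorf("no number in %q", lifetime)
	}
	num, _ := strconv.Atoi(nums[len(nums)-1])

	var unitMs int64
	switch unit := lifetime[len(lifetime)-1:]; unit {
	case "i":
		unitMs = time.Minute.Milliseconds()
	case "h":
		unitMs = time.Hour.Milliseconds()
	case "d":
		unitMs = day
	case "w":
		unitMs = day * 7
	case "m":
		unitMs = day * 30
	case "y":
		unitMs = day * 360
	default:
		return 0, fmt.Errorf("invalid unit %q", unit)
	}
	return unitMs * int64(num), nil
}

func main() {
	ms, err := parseLifetime("2w")
	fmt.Println(ms, err) // 1209600000 <nil>
}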

217 app/parsers.go

@@ -70,17 +70,81 @@ func (s skunkyart) ParseComments(c devianter.Comments) string {
	return cmmts.String()
}

func (s skunkyart) DeviationList(devs []devianter.Deviation, content ...DeviationList) string {
	var list strings.Builder
func (s skunkyart) DeviationList(devs []devianter.Deviation, allowAtom bool, content ...DeviationList) string {
	if s.Atom && s.Page > 1 {
		s.ReturnHTTPError(400)
		return ""
	} else if s.Atom {
	}

	var list, listContent strings.Builder

	for i, l := 0, len(devs); i < l; i++ {
		data := &devs[i]
		if preview, fullview := ParseMedia(data.Media, 320), ParseMedia(data.Media); !(data.NSFW && !CFG.Nsfw) {
			if allowAtom && s.Atom {
				id := strconv.Itoa(data.ID)
				listContent.WriteString(`<entry><author><name>`)
				listContent.WriteString(data.Author.Username)
				listContent.WriteString(`</name></author><title>`)
				listContent.WriteString(data.Title)
				listContent.WriteString(`</title><link rel="alternate" type="text/html" href="`)
				listContent.WriteString(UrlBuilder("post", data.Author.Username, "atom-"+id))
				listContent.WriteString(`"/><id>`)
				listContent.WriteString(id)
				listContent.WriteString(`</id><published>`)
				listContent.WriteString(data.PublishedTime.UTC().Format("Mon, 02 Jan 2006 15:04:05 -0700"))
				listContent.WriteString(`</published>`)
				listContent.WriteString(`<media:group><media:title>`)
				listContent.WriteString(data.Title)
				listContent.WriteString(`</media:title><media:thumbinal url="`)
				listContent.WriteString(preview)
				listContent.WriteString(`"/></media:group><content type="xhtml"><div xmlns="http://www.w3.org/1999/xhtml"><a href="`)
				listContent.WriteString(ConvertDeviantArtUrlToSkunkyArt(data.Url))
				listContent.WriteString(`"><img src="`)
				listContent.WriteString(fullview)
				listContent.WriteString(`"/></a><p>`)
				listContent.WriteString(ParseDescription(data.TextContent))
				listContent.WriteString(`</p></div></content></entry>`)
			} else {
				listContent.WriteString(`<div class="block">`)
				if fullview != "" && preview != "" {
					listContent.WriteString(`<a title="open/download" href="`)
					listContent.WriteString(fullview)
					listContent.WriteString(`"><img loading="lazy" src="`)
					listContent.WriteString(preview)
					listContent.WriteString(`" width="15%"></a>`)
				} else {
					listContent.WriteString(`<h1>[ TEXT ]</h1>`)
				}
				listContent.WriteString(`<br><a href="`)
				listContent.WriteString(ConvertDeviantArtUrlToSkunkyArt(data.Url))
				listContent.WriteString(`">`)
				listContent.WriteString(data.Author.Username)
				listContent.WriteString(" - ")
				listContent.WriteString(data.Title)

				if data.NSFW {
					listContent.WriteString(` [<span class="nsfw">NSFW</span>]`)
				}
				if data.AI {
					listContent.WriteString(" [🤖]")
				}
				if data.DD {
					listContent.WriteString(` [<span class="dd">DD</span>]`)
				}

				listContent.WriteString("</a></div>")
			}
		}
	}

	if allowAtom && s.Atom {
		list.WriteString(`<?xml version="1.0" encoding="UTF-8"?><feed xmlns:media="http://search.yahoo.com/mrss/" xmlns="http://www.w3.org/2005/Atom">`)

		list.WriteString(`<title>`)
		if s.Type == 0 {
			list.WriteString("Daily Deviations")
		} else if len(devs) != 0 {
		} else if s.Type == 'g' && len(devs) != 0 {
			list.WriteString(devs[0].Author.Username)
		} else {
			list.WriteString("SkunkyArt")
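To make the string building in the Atom branch above easier to follow, here is the approximate shape of one generated entry as a Go raw string. Hosts and values are placeholders assembled by hand, not produced by running the code, and the media:thumbinal element name is reproduced exactly as the code writes it.

// Approximate output of the allowAtom branch above for a single deviation.
const exampleEntry = `<entry><author><name>someartist</name></author>` +
	`<title>Some title</title>` +
	`<link rel="alternate" type="text/html" href="http://skunky.example/post/someartist/atom-123456"/>` +
	`<id>123456</id>` +
	`<published>Mon, 02 Jan 2006 15:04:05 +0000</published>` +
	`<media:group><media:title>Some title</media:title>` +
	`<media:thumbinal url="http://skunky.example/media/preview.jpg"/></media:group>` +
	`<content type="xhtml"><div xmlns="http://www.w3.org/1999/xhtml">` +
	`<a href="http://skunky.example/post/someartist/some-title-123456"><img src="http://skunky.example/media/full.jpg"/></a>` +
	`<p>Description text</p></div></content></entry>`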
@@ -90,75 +154,16 @@ func (s skunkyart) DeviationList(devs []devianter.Deviation, content ...Deviatio
		list.WriteString(`<link rel="alternate" href="`)
		list.WriteString(Host)
		list.WriteString(`"/>`)

		list.WriteString(listContent.String())

		list.WriteString("</feed>")
		wr(s.Writer, list.String())
	} else {
		list.WriteString(`<div class="content">`)
	}
	for _, data := range devs {
		if !(data.NSFW && !CFG.Nsfw) {
			url := ParseMedia(data.Media)
			if s.Atom {
				id := strconv.Itoa(data.ID)
				list.WriteString(`<entry><author><name>`)
				list.WriteString(data.Author.Username)
				list.WriteString(`</name></author><title>`)
				list.WriteString(data.Title)
				list.WriteString(`</title><link rel="alternate" type="text/html" href="`)
				list.WriteString(UrlBuilder("post", data.Author.Username, "atom-"+id))
				list.WriteString(`"/><id>`)
				list.WriteString(id)
				list.WriteString(`</id><published>`)
				list.WriteString(data.PublishedTime.UTC().Format("Mon, 02 Jan 2006 15:04:05 -0700"))
				list.WriteString(`</published>`)
				list.WriteString(`<media:group><media:title>`)
				list.WriteString(data.Title)
				list.WriteString(`</media:title><media:thumbinal url="`)
				list.WriteString(url)
				list.WriteString(`"/></media:group><content type="xhtml"><div xmlns="http://www.w3.org/1999/xhtml"><a href="`)
				list.WriteString(ConvertDeviantArtUrlToSkunkyArt(data.Url))
				list.WriteString(`"><img src="`)
				list.WriteString(url)
				list.WriteString(`"/></a><p>`)
				list.WriteString(ParseDescription(data.TextContent))
				list.WriteString(`</p></div></content></entry>`)
			} else {
				list.WriteString(`<div class="block">`)
				if url != "" {
					list.WriteString(`<a title="open/download" href="`)
					list.WriteString(url)
					list.WriteString(`"><img loading="lazy" src="`)
					list.WriteString(url)
					list.WriteString(`" width="15%"></a>`)
				} else {
					list.WriteString(`<h1>[ TEXT ]</h1>`)
				}
				list.WriteString(`<br><a href="`)
				list.WriteString(ConvertDeviantArtUrlToSkunkyArt(data.Url))
				list.WriteString(`">`)
				list.WriteString(data.Author.Username)
				list.WriteString(" - ")
				list.WriteString(data.Title)

				// NSFW, AI and Daily Deviation badges
				if data.NSFW {
					list.WriteString(` [<span class="nsfw">NSFW</span>]`)
				}
				if data.AI {
					list.WriteString(" [🤖]")
				}
				if data.DD {
					list.WriteString(` [<span class="dd">DD</span>]`)
				}
				list.WriteString(listContent.String())

				list.WriteString("</a></div>")
			}
		}
	}

	if s.Atom {
		list.WriteString("</feed>")
		s.Writer.Write([]byte(list.String()))
		return ""
	} else {
		list.WriteString("</div>")
		if content != nil {
			list.WriteString(s.NavBase(content[0]))
@@ -177,7 +182,7 @@ type text struct {
}

func ParseDescription(dscr devianter.Text) string {
	var parseddescription strings.Builder
	var parsedDescription strings.Builder
	TagBuilder := func(content string, tags ...string) string {
		l := len(tags)
		for x := 0; x < l; x++ {
@@ -274,16 +279,18 @@ func ParseDescription(dscr devianter.Text) string {

			switch x.Type {
			case "atomic":
				d := entities[x.EntityRanges[0].Key]
				parseddescription.WriteString(`<a href="`)
				parseddescription.WriteString(ConvertDeviantArtUrlToSkunkyArt(d.Url))
				parseddescription.WriteString(`"><img width="50%" src="`)
				parseddescription.WriteString(ParseMedia(d.Media))
				parseddescription.WriteString(`" title="`)
				parseddescription.WriteString(d.Author.Username)
				parseddescription.WriteString(" - ")
				parseddescription.WriteString(d.Title)
				parseddescription.WriteString(`"></a>`)
				if len(x.EntityRanges) != 0 {
					d := entities[x.EntityRanges[0].Key]
					parsedDescription.WriteString(`<a href="`)
					parsedDescription.WriteString(ConvertDeviantArtUrlToSkunkyArt(d.Url))
					parsedDescription.WriteString(`"><img width="50%" src="`)
					parsedDescription.WriteString(ParseMedia(d.Media))
					parsedDescription.WriteString(`" title="`)
					parsedDescription.WriteString(d.Author.Username)
					parsedDescription.WriteString(" - ")
					parsedDescription.WriteString(d.Title)
					parsedDescription.WriteString(`"></a>`)
				}
			case "unstyled":
				if l := len(Styles); l != 0 {
					for n, r := range Styles {
@@ -292,31 +299,31 @@ func ParseDescription(dscr devianter.Text) string {
							tag = "h2"
						}

						parseddescription.WriteString(x.Text[:r.From])
						parsedDescription.WriteString(x.Text[:r.From])
						if len(urls) != 0 && len(x.EntityRanges) != 0 {
							ra := &x.EntityRanges[0]

							parseddescription.WriteString(`<a target="_blank" href="`)
							parseddescription.WriteString(urls[ra.Key])
							parseddescription.WriteString(`">`)
							parseddescription.WriteString(r.TXT)
							parseddescription.WriteString(`</a>`)
							parsedDescription.WriteString(`<a target="_blank" href="`)
							parsedDescription.WriteString(urls[ra.Key])
							parsedDescription.WriteString(`">`)
							parsedDescription.WriteString(r.TXT)
							parsedDescription.WriteString(`</a>`)
						} else if l > n+1 {
							parseddescription.WriteString(r.TXT)
							parsedDescription.WriteString(r.TXT)
						}
						parseddescription.WriteString(TagBuilder(tag, x.Text[r.To:]))
						parsedDescription.WriteString(TagBuilder(tag, x.Text[r.To:]))
					}
				} else {
					parseddescription.WriteString(x.Text)
					parsedDescription.WriteString(x.Text)
				}
			}
			parseddescription.WriteString("<br>")
			parsedDescription.WriteString("<br>")
		}
	} else if dl != 0 {
		for tt := html.NewTokenizer(strings.NewReader(dscr.Html.Markup)); ; {
			switch tt.Next() {
			case html.ErrorToken:
				return parseddescription.String()
				return parsedDescription.String()
			case html.StartTagToken, html.EndTagToken, html.SelfClosingTagToken:
				token := tt.Token()
				switch token.Data {
@@ -324,11 +331,11 @@ func ParseDescription(dscr devianter.Text) string {
					for _, a := range token.Attr {
						if a.Key == "href" {
							url := DeleteTrackingFromUrl(a.Val)
							parseddescription.WriteString(`<a target="_blank" href="`)
							parseddescription.WriteString(url)
							parseddescription.WriteString(`">`)
							parseddescription.WriteString(GetValueOfTag(tt))
							parseddescription.WriteString("</a> ")
							parsedDescription.WriteString(`<a target="_blank" href="`)
							parsedDescription.WriteString(url)
							parsedDescription.WriteString(`">`)
							parsedDescription.WriteString(GetValueOfTag(tt))
							parsedDescription.WriteString("</a> ")
						}
					}
				case "img":
@@ -344,24 +351,24 @@ func ParseDescription(dscr devianter.Text) string {
						}
						if title != "" {
							for x := -1; x < b; x++ {
								parseddescription.WriteString(`<img src="`)
								parseddescription.WriteString(uri)
								parseddescription.WriteString(`" title="`)
								parseddescription.WriteString(title)
								parseddescription.WriteString(`">`)
								parsedDescription.WriteString(`<img src="`)
								parsedDescription.WriteString(uri)
								parsedDescription.WriteString(`" title="`)
								parsedDescription.WriteString(title)
								parsedDescription.WriteString(`">`)
							}
						}
					}
				case "br", "li", "ul", "p", "b":
					parseddescription.WriteString(token.String())
					parsedDescription.WriteString(token.String())
				case "div":
					parseddescription.WriteString("<p> ")
					parsedDescription.WriteString("<p> ")
				}
			case html.TextToken:
				parseddescription.Write(tt.Text())
				parsedDescription.Write(tt.Text())
			}
		}
	}

	return parseddescription.String()
	return parsedDescription.String()
}

@@ -1,7 +1,6 @@
package app

import (
	"io"
	"net/http"
	u "net/url"
	"strconv"
@@ -44,7 +43,7 @@ func Router() {

	// the function that handles everything
	handle := func(w http.ResponseWriter, r *http.Request) {
		if h := r.Header["Scheme"]; len(h) != 0 && h[0] == "https" {
		if h := r.Header["X-Forwarded-Proto"]; len(h) != 0 && h[0] == "https" {
			Host = h[0] + "://" + r.Host
		} else {
			Host = "http://" + r.Host
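The change above switches scheme detection from a custom Scheme header to the conventional X-Forwarded-Proto set by reverse proxies. A small sketch of the same idea, written with Header.Get and the httptest package purely for demonstration:

package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"
)

// baseURL mirrors the host detection above: trust X-Forwarded-Proto when a
// reverse proxy reports https, otherwise fall back to plain http.
func baseURL(r *http.Request) string {
	if r.Header.Get("X-Forwarded-Proto") == "https" {
		return "https://" + r.Host
	}
	return "http://" + r.Host
}

func main() {
	r := httptest.NewRequest("GET", "http://skunky.example/", nil)
	r.Header.Set("X-Forwarded-Proto", "https")
	fmt.Println(baseURL(r)) // https://skunky.example
}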
@@ -97,10 +96,14 @@ func Router() {
			skunky.About()
		case "stylesheet":
			w.Header().Add("content-type", "text/css")
			io.WriteString(w, Templates["skunky.css"])
			wr(w, Templates["skunky.css"])
		case "favicon.ico":
			wr(w, Templates["logo.png"])
		}
	}

	http.HandleFunc("/", handle)
	try_with_exitstatus(http.ListenAndServe(CFG.Listen, nil), 1)
	println("SkunkyArt is listening on", CFG.Listen)

	tryWithExitStatus(http.ListenAndServe(CFG.Listen, nil), 1)
}

54 app/util.go

@@ -26,7 +26,7 @@ func try(e error) {
		println(e.Error())
	}
}
func try_with_exitstatus(err error, code int) {
func tryWithExitStatus(err error, code int) {
	if err != nil {
		exit(err.Error(), code)
	}
@@ -87,7 +87,7 @@ func Download(url string) (d Downloaded) {

	req, e := http.NewRequest("GET", url, nil)
	try(e)
	req.Header.Set("User-Agent", "Mozilla/5.0 (X11; Linux x86_64; rv:123.0) Gecko/20100101 Firefox/123.0.0")
	req.Header.Set("User-Agent", CFG.UserAgent)

	resp, e := cli.Do(req)
	try(e)
@@ -148,14 +148,14 @@ func InitCacheSystem() {
		try(e)
		for _, a := range dirnames {
			a = c.Path + "/" + a
			if c.Lifetime != 0 {
			if c.Lifetime != "" {
				now := time.Now().UnixMilli()

				f, _ := os.Stat(a)
				stat := f.Sys().(*syscall.Stat_t)
				time := time.Unix(stat.Ctim.Unix()).UnixMilli()

				if time+c.Lifetime <= now {
				if time+lifetimeParsed <= now {
					try(os.RemoveAll(a))
				}
			}
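Context for the expiry check above: the cache entry's change time comes from the platform-specific syscall.Stat_t, so this path is Linux-only. A self-contained sketch of the same check, with a helper name of my own choosing:

package main

import (
	"fmt"
	"os"
	"syscall"
	"time"
)

// expired reports whether a cache entry's ctime plus the parsed lifetime (in
// milliseconds) lies in the past, mirroring the comparison in InitCacheSystem.
func expired(path string, lifetimeMs int64) (bool, error) {
	info, err := os.Stat(path)
	if err != nil {
		return false, err
	}
	st, ok := info.Sys().(*syscall.Stat_t)
	if !ok {
		return false, fmt.Errorf("unexpected Sys() type")
	}
	ctime := time.Unix(st.Ctim.Unix()).UnixMilli() // Ctim is a syscall.Timespec
	return ctime+lifetimeMs <= time.Now().UnixMilli(), nil
}

func main() {
	old, err := expired("cache", 24*time.Hour.Milliseconds())
	fmt.Println(old, err)
}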
@@ -172,19 +172,19 @@ func InitCacheSystem() {
func CopyTemplatesToMemory() {
	for _, dirname := range CFG.Dirs {
		dir, e := os.ReadDir(dirname)
		try_with_exitstatus(e, 1)
		tryWithExitStatus(e, 1)

		for _, x := range dir {
			file, e := os.ReadFile(dirname + "/" + x.Name())
			try_with_exitstatus(e, 1)
			tryWithExitStatus(e, 1)
			Templates[x.Name()] = string(file)
		}
	}
}

/* PARSING HELPERS */
func ParseMedia(media devianter.Media) string {
	url := devianter.UrlFromMedia(media)
func ParseMedia(media devianter.Media, thumb ...int) string {
	url := devianter.UrlFromMedia(media, thumb...)
	if len(url) != 0 && CFG.Proxy {
		url = url[21:]
		dot := strings.Index(url, ".")
@@ -197,9 +197,10 @@ func ParseMedia(media devianter.Media) string {
func ConvertDeviantArtUrlToSkunkyArt(url string) (output string) {
	if len(url) > 32 && url[27:32] != "stash" {
		url = url[27:]
		toart := strings.Index(url, "/art/")
		if toart != -1 {
			output = UrlBuilder("post", url[:toart], url[toart+5:])
		firstshash := strings.Index(url, "/")
		lastshash := firstshash + strings.Index(url[firstshash+1:], "/")
		if lastshash != -1 {
			output = UrlBuilder("post", url[:firstshash], url[lastshash+2:])
		}
	}
	return
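Worked example for the rewritten slicing above: instead of searching for "/art/", the function now strips the 27-character "https://www.deviantart.com/" prefix and splits on the first two slashes, which also works for post URLs whose middle segment is not literally "art". A standalone illustration, where urlBuilder stands in for the project's UrlBuilder:

package main

import (
	"fmt"
	"strings"
)

func main() {
	urlBuilder := func(parts ...string) string { return "/" + strings.Join(parts, "/") }

	url := "https://www.deviantart.com/someartist/art/Some-Title-123456789"
	rest := url[27:] // "someartist/art/Some-Title-123456789"

	first := strings.Index(rest, "/")
	last := first + strings.Index(rest[first+1:], "/")
	fmt.Println(urlBuilder("post", rest[:first], rest[last+2:]))
	// Output: /post/someartist/Some-Title-123456789
}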
@@ -236,13 +237,9 @@ type DeviationList struct {

// FIXME: on some artworks the first page can make the navigation bar disappear entirely.
func (s skunkyart) NavBase(c DeviationList) string {
	// TODO: make this clearer
	// page navigation
	var list strings.Builder
	list.WriteString("<br>")
	p := s.Page

	// helper for generating links
	list.WriteString("<br>")
	prevrev := func(msg string, page int, onpage bool) {
		if !onpage {
			list.WriteString(`<a href="?p=`)
@@ -268,33 +265,26 @@ func (s skunkyart) NavBase(c DeviationList) string {
		}
	}

	// prev/next
	p := s.Page

	if p > 1 {
		prevrev("<= Prev |", p-1, false)
	} else {
		p = 1
	}

	if c.Pages > 0 {
		// backward
		for x := p - 6; x < p && x > 0; x++ {
			prevrev(strconv.Itoa(x), x, false)
		}

		// forward
		for x := p; x <= p+6 && c.Pages > p+6; x++ {
			if x == p {
				prevrev("", x, true)
				x++
		for i, x := p-6, 0; (i <= c.Pages && i <= p+6) && x < 12; i++ {
			if i > 0 {
				var onPage bool
				if i == p {
					onPage = true
				}

				if x > p {
					prevrev(strconv.Itoa(x), x, false)
				}
				prevrev(strconv.Itoa(i), i, onPage)
				x++
			}
		}

	// prev/next
	if c.More {
		prevrev("| Next =>", p+1, false)
	}
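The rewritten NavBase collapses the separate back and forward loops into one window: pages from p-6 to p+6, clamped to valid page numbers and capped at twelve links, with the current page rendered without a link. A simplified, self-contained sketch of that window; the markup and names are mine, and the Prev/Next handling is abbreviated.

package main

import (
	"fmt"
	"strconv"
	"strings"
)

// navWindow emits a Prev link plus up to twelve numbered page links around p,
// marking the current page instead of linking it.
func navWindow(p, pages int) string {
	var b strings.Builder
	link := func(label string, page int, current bool) {
		if current {
			b.WriteString("[" + label + "] ")
			return
		}
		b.WriteString(`<a href="?p=` + strconv.Itoa(page) + `">` + label + "</a> ")
	}

	if p > 1 {
		link("<= Prev |", p-1, false)
	}
	if pages > 0 {
		for i, emitted := p-6, 0; i <= pages && i <= p+6 && emitted < 12; i++ {
			if i > 0 {
				link(strconv.Itoa(i), i, i == p)
				emitted++
			}
		}
	}
	return b.String()
}

func main() {
	// Prints a Prev link plus links for pages 1..11, with page 5 shown unlinked.
	fmt.Println(navWindow(5, 40))
}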
@@ -48,6 +48,7 @@ type skunkyart struct {
	}

	SomeList string
	DDStrips string
	Deviation struct {
		Post devianter.Post
		Related string
@@ -93,7 +94,10 @@ func (s skunkyart) GRUser() {

	var g devianter.Group
	g.Name = s.Query
	s.Templates.GroupUser.GR = g.GroupFunc()
	var err error
	s.Templates.GroupUser.GR, err = g.GetGroup()
	try(err)

	group := &s.Templates.GroupUser

	switch s.Type {
@@ -135,7 +139,7 @@ func (s skunkyart) GRUser() {
				group.About.Interests += interest.String()
			}
		}
		group.About.Comments = s.ParseComments(devianter.CommentsFunc(
		group.About.Comments = s.ParseComments(devianter.GetComments(
			strconv.Itoa(group.GR.Gruser.ID),
			"",
			s.Page,
@@ -161,9 +165,11 @@ func (s skunkyart) GRUser() {
			s.Page++
		}

		gallery := g.Gallery(s.Page, folderid)
		gallery, err := g.GetGallery(s.Page, folderid)
		try(err)

		if folderid > 0 {
			group.Gallery.List = s.DeviationList(gallery.Content.Results, DeviationList{
			group.Gallery.List = s.DeviationList(gallery.Content.Results, true, DeviationList{
				More: gallery.Content.HasMore,
			})
		} else {
@@ -204,7 +210,7 @@ func (s skunkyart) GRUser() {
			}

			if x.Name == "folder_deviations" {
				group.Gallery.List = s.DeviationList(x.ModuleData.Folder.Deviations, DeviationList{
				group.Gallery.List = s.DeviationList(x.ModuleData.Folder.Deviations, true, DeviationList{
					Pages: x.ModuleData.Folder.Pages,
					More: x.ModuleData.Folder.HasMore,
				})
@@ -227,7 +233,7 @@ func (s skunkyart) Deviation(author, postname string) {
	post := &s.Templates.Deviation

	id := id_search[len(id_search)-1]
	post.Post = devianter.DeviationFunc(id, author)
	post.Post = devianter.GetDeviation(id, author)

	if post.Post.Deviation.TextContent.Excerpt != "" {
		post.Post.Description = ParseDescription(post.Post.Deviation.TextContent)
@@ -239,7 +245,7 @@ func (s skunkyart) Deviation(author, postname string) {
		post.Post.IMG = ParseMedia(post.Post.Deviation.Media)
		for _, x := range post.Post.Deviation.Extended.RelatedContent {
			if len(x.Deviations) != 0 {
				post.Related += s.DeviationList(x.Deviations)
				post.Related += s.DeviationList(x.Deviations, false)
			}
		}

@@ -259,7 +265,7 @@ func (s skunkyart) Deviation(author, postname string) {
			post.Post.Comments.Cursor = ""
		}

		post.Comments = s.ParseComments(devianter.CommentsFunc(id, post.Post.Comments.Cursor, s.Page, 1))
		post.Comments = s.ParseComments(devianter.GetComments(id, post.Post.Comments.Cursor, s.Page, 1))

		s.ExecuteTemplate("deviantion.htm", &s)
	} else {
@@ -268,25 +274,38 @@ func (s skunkyart) Deviation(author, postname string) {
}

func (s skunkyart) DD() {
	dd := devianter.DailyDeviationsFunc(s.Page)
	s.Templates.SomeList = s.DeviationList(dd.Deviations, DeviationList{
	dd := devianter.GetDailyDeviations(s.Page)
	var strips strings.Builder
	for _, x := range dd.Strips {
		strips.WriteString(`<h3 class="`)
		strips.WriteString(x.Codename)
		strips.WriteString(`"> <a href="#`)
		strips.WriteString(x.Codename)
		strips.WriteString(`"># </a>`)
		strips.WriteString(x.Title)
		strips.WriteString(`</h3>`)

		strips.WriteString(s.DeviationList(x.Deviations, false))
	}
	s.Templates.DDStrips = strips.String()
	s.Templates.SomeList = s.DeviationList(dd.Deviations, true, DeviationList{
		Pages: 0,
		More: dd.HasMore,
	})
	if !s.Atom {
		s.ExecuteTemplate("list.htm", &s)
		s.ExecuteTemplate("daily.htm", &s)
	}
}

func (s skunkyart) Search() {
	s.Atom = false
	var e error
	var err error
	ss := &s.Templates.Search
	switch s.Type {
	case 'a', 't':
		ss.Content, e = devianter.SearchFunc(s.Query, s.Page, s.Type)
		ss.Content, err = devianter.PerformSearch(s.Query, s.Page, s.Type)
	case 'g':
		ss.Content, e = devianter.SearchFunc(s.Query, s.Page, s.Type, s.Args.Get("usr"))
		ss.Content, err = devianter.PerformSearch(s.Query, s.Page, s.Type, s.Args.Get("usr"))
	case 'r': // scraper, since DeviantArt has withheld the guest API for group search
		var (
			usernames = make(map[int]string)
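For the Daily Deviations strips added in DD() above, each strip gets an anchored heading followed by its deviation list. The heading for one strip comes out roughly like this; codename and title are placeholders.

// Illustrative heading produced for one strip by the loop in DD() above.
const exampleStripHeading = `<h3 class="daily-picks"> <a href="#daily-picks"># </a>Daily Picks</h3>`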
@@ -333,10 +352,10 @@ func (s skunkyart) Search() {
	default:
		s.ReturnHTTPError(400)
	}
	try(e)
	try(err)

	if s.Type != 'r' {
		ss.List = s.DeviationList(ss.Content.Results, DeviationList{
		ss.List = s.DeviationList(ss.Content.Results, false, DeviationList{
			Pages: ss.Content.Pages,
			More: ss.Content.HasMore,
		})