import hugo@v0.49.2 files
This commit is contained in:
parent
3bcb0234c2
commit
5b044870c3
5 changed files with 933 additions and 0 deletions
201
hugov0492/LICENSE
Normal file
201
hugov0492/LICENSE
Normal file
|
@ -0,0 +1,201 @@
|
||||||
|
Apache License
|
||||||
|
Version 2.0, January 2004
|
||||||
|
http://www.apache.org/licenses/
|
||||||
|
|
||||||
|
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||||
|
|
||||||
|
1. Definitions.
|
||||||
|
|
||||||
|
"License" shall mean the terms and conditions for use, reproduction,
|
||||||
|
and distribution as defined by Sections 1 through 9 of this document.
|
||||||
|
|
||||||
|
"Licensor" shall mean the copyright owner or entity authorized by
|
||||||
|
the copyright owner that is granting the License.
|
||||||
|
|
||||||
|
"Legal Entity" shall mean the union of the acting entity and all
|
||||||
|
other entities that control, are controlled by, or are under common
|
||||||
|
control with that entity. For the purposes of this definition,
|
||||||
|
"control" means (i) the power, direct or indirect, to cause the
|
||||||
|
direction or management of such entity, whether by contract or
|
||||||
|
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||||
|
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||||
|
|
||||||
|
"You" (or "Your") shall mean an individual or Legal Entity
|
||||||
|
exercising permissions granted by this License.
|
||||||
|
|
||||||
|
"Source" form shall mean the preferred form for making modifications,
|
||||||
|
including but not limited to software source code, documentation
|
||||||
|
source, and configuration files.
|
||||||
|
|
||||||
|
"Object" form shall mean any form resulting from mechanical
|
||||||
|
transformation or translation of a Source form, including but
|
||||||
|
not limited to compiled object code, generated documentation,
|
||||||
|
and conversions to other media types.
|
||||||
|
|
||||||
|
"Work" shall mean the work of authorship, whether in Source or
|
||||||
|
Object form, made available under the License, as indicated by a
|
||||||
|
copyright notice that is included in or attached to the work
|
||||||
|
(an example is provided in the Appendix below).
|
||||||
|
|
||||||
|
"Derivative Works" shall mean any work, whether in Source or Object
|
||||||
|
form, that is based on (or derived from) the Work and for which the
|
||||||
|
editorial revisions, annotations, elaborations, or other modifications
|
||||||
|
represent, as a whole, an original work of authorship. For the purposes
|
||||||
|
of this License, Derivative Works shall not include works that remain
|
||||||
|
separable from, or merely link (or bind by name) to the interfaces of,
|
||||||
|
the Work and Derivative Works thereof.
|
||||||
|
|
||||||
|
"Contribution" shall mean any work of authorship, including
|
||||||
|
the original version of the Work and any modifications or additions
|
||||||
|
to that Work or Derivative Works thereof, that is intentionally
|
||||||
|
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||||
|
or by an individual or Legal Entity authorized to submit on behalf of
|
||||||
|
the copyright owner. For the purposes of this definition, "submitted"
|
||||||
|
means any form of electronic, verbal, or written communication sent
|
||||||
|
to the Licensor or its representatives, including but not limited to
|
||||||
|
communication on electronic mailing lists, source code control systems,
|
||||||
|
and issue tracking systems that are managed by, or on behalf of, the
|
||||||
|
Licensor for the purpose of discussing and improving the Work, but
|
||||||
|
excluding communication that is conspicuously marked or otherwise
|
||||||
|
designated in writing by the copyright owner as "Not a Contribution."
|
||||||
|
|
||||||
|
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||||
|
on behalf of whom a Contribution has been received by Licensor and
|
||||||
|
subsequently incorporated within the Work.
|
||||||
|
|
||||||
|
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||||
|
this License, each Contributor hereby grants to You a perpetual,
|
||||||
|
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||||
|
copyright license to reproduce, prepare Derivative Works of,
|
||||||
|
publicly display, publicly perform, sublicense, and distribute the
|
||||||
|
Work and such Derivative Works in Source or Object form.
|
||||||
|
|
||||||
|
3. Grant of Patent License. Subject to the terms and conditions of
|
||||||
|
this License, each Contributor hereby grants to You a perpetual,
|
||||||
|
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||||
|
(except as stated in this section) patent license to make, have made,
|
||||||
|
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||||
|
where such license applies only to those patent claims licensable
|
||||||
|
by such Contributor that are necessarily infringed by their
|
||||||
|
Contribution(s) alone or by combination of their Contribution(s)
|
||||||
|
with the Work to which such Contribution(s) was submitted. If You
|
||||||
|
institute patent litigation against any entity (including a
|
||||||
|
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||||
|
or a Contribution incorporated within the Work constitutes direct
|
||||||
|
or contributory patent infringement, then any patent licenses
|
||||||
|
granted to You under this License for that Work shall terminate
|
||||||
|
as of the date such litigation is filed.
|
||||||
|
|
||||||
|
4. Redistribution. You may reproduce and distribute copies of the
|
||||||
|
Work or Derivative Works thereof in any medium, with or without
|
||||||
|
modifications, and in Source or Object form, provided that You
|
||||||
|
meet the following conditions:
|
||||||
|
|
||||||
|
(a) You must give any other recipients of the Work or
|
||||||
|
Derivative Works a copy of this License; and
|
||||||
|
|
||||||
|
(b) You must cause any modified files to carry prominent notices
|
||||||
|
stating that You changed the files; and
|
||||||
|
|
||||||
|
(c) You must retain, in the Source form of any Derivative Works
|
||||||
|
that You distribute, all copyright, patent, trademark, and
|
||||||
|
attribution notices from the Source form of the Work,
|
||||||
|
excluding those notices that do not pertain to any part of
|
||||||
|
the Derivative Works; and
|
||||||
|
|
||||||
|
(d) If the Work includes a "NOTICE" text file as part of its
|
||||||
|
distribution, then any Derivative Works that You distribute must
|
||||||
|
include a readable copy of the attribution notices contained
|
||||||
|
within such NOTICE file, excluding those notices that do not
|
||||||
|
pertain to any part of the Derivative Works, in at least one
|
||||||
|
of the following places: within a NOTICE text file distributed
|
||||||
|
as part of the Derivative Works; within the Source form or
|
||||||
|
documentation, if provided along with the Derivative Works; or,
|
||||||
|
within a display generated by the Derivative Works, if and
|
||||||
|
wherever such third-party notices normally appear. The contents
|
||||||
|
of the NOTICE file are for informational purposes only and
|
||||||
|
do not modify the License. You may add Your own attribution
|
||||||
|
notices within Derivative Works that You distribute, alongside
|
||||||
|
or as an addendum to the NOTICE text from the Work, provided
|
||||||
|
that such additional attribution notices cannot be construed
|
||||||
|
as modifying the License.
|
||||||
|
|
||||||
|
You may add Your own copyright statement to Your modifications and
|
||||||
|
may provide additional or different license terms and conditions
|
||||||
|
for use, reproduction, or distribution of Your modifications, or
|
||||||
|
for any such Derivative Works as a whole, provided Your use,
|
||||||
|
reproduction, and distribution of the Work otherwise complies with
|
||||||
|
the conditions stated in this License.
|
||||||
|
|
||||||
|
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||||
|
any Contribution intentionally submitted for inclusion in the Work
|
||||||
|
by You to the Licensor shall be under the terms and conditions of
|
||||||
|
this License, without any additional terms or conditions.
|
||||||
|
Notwithstanding the above, nothing herein shall supersede or modify
|
||||||
|
the terms of any separate license agreement you may have executed
|
||||||
|
with Licensor regarding such Contributions.
|
||||||
|
|
||||||
|
6. Trademarks. This License does not grant permission to use the trade
|
||||||
|
names, trademarks, service marks, or product names of the Licensor,
|
||||||
|
except as required for reasonable and customary use in describing the
|
||||||
|
origin of the Work and reproducing the content of the NOTICE file.
|
||||||
|
|
||||||
|
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||||
|
agreed to in writing, Licensor provides the Work (and each
|
||||||
|
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||||
|
implied, including, without limitation, any warranties or conditions
|
||||||
|
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||||
|
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||||
|
appropriateness of using or redistributing the Work and assume any
|
||||||
|
risks associated with Your exercise of permissions under this License.
|
||||||
|
|
||||||
|
8. Limitation of Liability. In no event and under no legal theory,
|
||||||
|
whether in tort (including negligence), contract, or otherwise,
|
||||||
|
unless required by applicable law (such as deliberate and grossly
|
||||||
|
negligent acts) or agreed to in writing, shall any Contributor be
|
||||||
|
liable to You for damages, including any direct, indirect, special,
|
||||||
|
incidental, or consequential damages of any character arising as a
|
||||||
|
result of this License or out of the use or inability to use the
|
||||||
|
Work (including but not limited to damages for loss of goodwill,
|
||||||
|
work stoppage, computer failure or malfunction, or any and all
|
||||||
|
other commercial damages or losses), even if such Contributor
|
||||||
|
has been advised of the possibility of such damages.
|
||||||
|
|
||||||
|
9. Accepting Warranty or Additional Liability. While redistributing
|
||||||
|
the Work or Derivative Works thereof, You may choose to offer,
|
||||||
|
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||||
|
or other liability obligations and/or rights consistent with this
|
||||||
|
License. However, in accepting such obligations, You may act only
|
||||||
|
on Your own behalf and on Your sole responsibility, not on behalf
|
||||||
|
of any other Contributor, and only if You agree to indemnify,
|
||||||
|
defend, and hold each Contributor harmless for any liability
|
||||||
|
incurred by, or claims asserted against, such Contributor by reason
|
||||||
|
of your accepting any such warranty or additional liability.
|
||||||
|
|
||||||
|
END OF TERMS AND CONDITIONS
|
||||||
|
|
||||||
|
APPENDIX: How to apply the Apache License to your work.
|
||||||
|
|
||||||
|
To apply the Apache License to your work, attach the following
|
||||||
|
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||||
|
replaced with your own identifying information. (Don't include
|
||||||
|
the brackets!) The text should be enclosed in the appropriate
|
||||||
|
comment syntax for the file format. We also recommend that a
|
||||||
|
file or class name and description of purpose be included on the
|
||||||
|
same "printed page" as the copyright notice for easier
|
||||||
|
identification within third-party archives.
|
||||||
|
|
||||||
|
Copyright [yyyy] [name of copyright owner]
|
||||||
|
|
||||||
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
you may not use this file except in compliance with the License.
|
||||||
|
You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing, software
|
||||||
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
See the License for the specific language governing permissions and
|
||||||
|
limitations under the License.
|
4
hugov0492/README.md
Normal file
4
hugov0492/README.md
Normal file
|
@ -0,0 +1,4 @@
|
||||||
|
## hugo v0.49.2
|
||||||
|
|
||||||
|
The files in this directory are copied from the [hugo repository](https://github.com/gohugoio/hugo)
|
||||||
|
v0.49.2 and only slightly modified. The hugo license applies and is included.
|
142
hugov0492/frontmatter.go
Normal file
142
hugov0492/frontmatter.go
Normal file
|
@ -0,0 +1,142 @@
|
||||||
|
package hugov0492
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"github.com/BurntSushi/toml"
|
||||||
|
"github.com/spf13/cast"
|
||||||
|
"gopkg.in/yaml.v2"
|
||||||
|
)
|
||||||
|
|
||||||
|
// FrontmatterType represents a type of frontmatter (YAML, TOML or JSON) and
// knows how to parse it and which delimiters bound it in the source file.
type FrontmatterType struct {
	// Parse decodes content into a Go interface.
	Parse func([]byte) (map[string]interface{}, error)

	markstart, markend []byte // starting and ending delimiters
	includeMark bool // include start and end mark in output
}
|
||||||
|
|
||||||
|
// DetectFrontMatter detects the type of frontmatter analysing its first character.
|
||||||
|
func DetectFrontMatter(mark rune) (f *FrontmatterType) {
|
||||||
|
switch mark {
|
||||||
|
case '-':
|
||||||
|
return &FrontmatterType{HandleYAMLMetaData, []byte(YAMLDelim), []byte(YAMLDelim), false}
|
||||||
|
case '+':
|
||||||
|
return &FrontmatterType{HandleTOMLMetaData, []byte(TOMLDelim), []byte(TOMLDelim), false}
|
||||||
|
case '{':
|
||||||
|
return &FrontmatterType{HandleJSONMetaData, []byte{'{'}, []byte{'}'}, true}
|
||||||
|
default:
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// HandleYAMLMetaData unmarshals YAML-encoded datum and returns a Go interface
|
||||||
|
// representing the encoded data structure.
|
||||||
|
func HandleYAMLMetaData(datum []byte) (map[string]interface{}, error) {
|
||||||
|
m := map[string]interface{}{}
|
||||||
|
err := yaml.Unmarshal(datum, &m)
|
||||||
|
|
||||||
|
// To support boolean keys, the `yaml` package unmarshals maps to
|
||||||
|
// map[interface{}]interface{}. Here we recurse through the result
|
||||||
|
// and change all maps to map[string]interface{} like we would've
|
||||||
|
// gotten from `json`.
|
||||||
|
if err == nil {
|
||||||
|
for k, v := range m {
|
||||||
|
if vv, changed := stringifyMapKeys(v); changed {
|
||||||
|
m[k] = vv
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return m, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// HandleTOMLMetaData unmarshals TOML-encoded datum and returns a Go interface
|
||||||
|
// representing the encoded data structure.
|
||||||
|
func HandleTOMLMetaData(datum []byte) (map[string]interface{}, error) {
|
||||||
|
m := map[string]interface{}{}
|
||||||
|
datum = removeTOMLIdentifier(datum)
|
||||||
|
|
||||||
|
_, err := toml.Decode(string(datum), &m)
|
||||||
|
|
||||||
|
return m, err
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
// removeTOMLIdentifier removes, if necessary, beginning and ending TOML
|
||||||
|
// frontmatter delimiters from a byte slice.
|
||||||
|
func removeTOMLIdentifier(datum []byte) []byte {
|
||||||
|
ld := len(datum)
|
||||||
|
if ld < 8 {
|
||||||
|
return datum
|
||||||
|
}
|
||||||
|
|
||||||
|
b := bytes.TrimPrefix(datum, []byte(TOMLDelim))
|
||||||
|
if ld-len(b) != 3 {
|
||||||
|
// No TOML prefix trimmed, so bail out
|
||||||
|
return datum
|
||||||
|
}
|
||||||
|
|
||||||
|
b = bytes.Trim(b, "\r\n")
|
||||||
|
return bytes.TrimSuffix(b, []byte(TOMLDelim))
|
||||||
|
}
|
||||||
|
|
||||||
|
// HandleJSONMetaData unmarshals JSON-encoded datum and returns a Go interface
// representing the encoded data structure.
func HandleJSONMetaData(datum []byte) (map[string]interface{}, error) {
	metadata := make(map[string]interface{})

	if datum == nil {
		// Package json returns an error on nil input. Return an empty map
		// to be consistent with our other supported formats.
		return metadata, nil
	}

	if err := json.Unmarshal(datum, &metadata); err != nil {
		return metadata, err
	}
	return metadata, nil
}
|
||||||
|
|
||||||
|
// stringifyMapKeys recurses into in and changes all instances of
// map[interface{}]interface{} to map[string]interface{}. This is useful to
// work around the impedance mismatch between JSON and YAML unmarshaling that's
// described here: https://github.com/go-yaml/yaml/issues/139
//
// Inspired by https://github.com/stripe/stripe-mock, MIT licensed
//
// The boolean result reports whether the returned value replaces the input;
// callers only reassign when it is true, so unchanged subtrees are kept.
func stringifyMapKeys(in interface{}) (interface{}, bool) {
	switch in := in.(type) {
	case []interface{}:
		// Slices are fixed up in place, element by element; no replacement
		// of the slice itself is needed, so this case falls through to the
		// final `return nil, false`.
		for i, v := range in {
			if vv, replaced := stringifyMapKeys(v); replaced {
				in[i] = vv
			}
		}
	case map[interface{}]interface{}:
		// Rebuild the whole map with string keys.
		res := make(map[string]interface{})
		var (
			ok  bool
			err error
		)
		for k, v := range in {
			var ks string

			if ks, ok = k.(string); !ok {
				// Non-string key (e.g. a YAML bool or number): convert via
				// cast, falling back to fmt when cast cannot convert it.
				ks, err = cast.ToStringE(k)
				if err != nil {
					ks = fmt.Sprintf("%v", k)
				}
			}
			if vv, replaced := stringifyMapKeys(v); replaced {
				res[ks] = vv
			} else {
				res[ks] = v
			}
		}
		return res, true
	}

	return nil, false
}
|
399
hugov0492/parser.go
Normal file
399
hugov0492/parser.go
Normal file
|
@ -0,0 +1,399 @@
|
||||||
|
// Copyright 2016 The Hugo Authors. All rights reserved.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
// See the License for the specific language governing permissions and
|
||||||
|
// limitations under the License.
|
||||||
|
|
||||||
|
package hugov0492
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bufio"
|
||||||
|
"bytes"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
|
"unicode"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Leads, delimiters and markers used when classifying the first bytes of a
// content file and locating its frontmatter.
const (
	// TODO(bep) Do we really have to export these?

	// HTMLLead identifies the start of HTML documents.
	HTMLLead = "<"
	// YAMLLead identifies the start of YAML frontmatter.
	YAMLLead = "-"
	// YAMLDelim identifies the YAML front matter delimiter.
	YAMLDelim = "---"
	// TOMLLead identifies the start of TOML front matter.
	TOMLLead = "+"
	// TOMLDelim identifies the TOML front matter delimiter.
	TOMLDelim = "+++"
	// JSONLead identifies the start of JSON frontmatter.
	JSONLead = "{"
	// HTMLCommentStart identifies the start of HTML comment.
	HTMLCommentStart = "<!--"
	// HTMLCommentEnd identifies the end of HTML comment.
	HTMLCommentEnd = "-->"
	// BOM Unicode byte order marker
	BOM = '\ufeff'
)
|
||||||
|
|
||||||
|
var (
	// delims matches a line that opens front matter: a YAML ("---") or
	// TOML ("+++") delimiter followed only by whitespace and a newline,
	// or the opening brace of JSON front matter.
	delims = regexp.MustCompile(
		"^(" + regexp.QuoteMeta(YAMLDelim) + `\s*\n|` + regexp.QuoteMeta(TOMLDelim) + `\s*\n|` + regexp.QuoteMeta(JSONLead) + ")",
	)
)
|
||||||
|
|
||||||
|
// Page represents a parsed content page.
type Page interface {
	// FrontMatter contains the raw frontmatter with relevant delimiters.
	FrontMatter() []byte

	// Content contains the raw page content.
	Content() []byte

	// IsRenderable denotes that the page should be rendered.
	IsRenderable() bool

	// Metadata returns the unmarshalled frontmatter data.
	Metadata() (map[string]interface{}, error)
}
|
||||||
|
|
||||||
|
// page is the concrete Page implementation built by ReadFrom.
type page struct {
	render      bool   // false when the content opens with raw HTML
	frontmatter []byte // raw frontmatter, delimiters included
	content     []byte // everything after the frontmatter
}

// FrontMatter contains the raw frontmatter with relevant delimiters.
func (p *page) FrontMatter() []byte { return p.frontmatter }

// Content returns the raw page content.
func (p *page) Content() []byte { return p.content }

// IsRenderable denotes that the page should be rendered.
func (p *page) IsRenderable() bool { return p.render }
|
||||||
|
|
||||||
|
// Metadata returns the unmarshalled frontmatter data.
|
||||||
|
func (p *page) Metadata() (meta map[string]interface{}, err error) {
|
||||||
|
frontmatter := p.FrontMatter()
|
||||||
|
|
||||||
|
if len(frontmatter) != 0 {
|
||||||
|
fm := DetectFrontMatter(rune(frontmatter[0]))
|
||||||
|
if fm != nil {
|
||||||
|
meta, err = fm.Parse(frontmatter)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// ReadFrom reads the content from an io.Reader and constructs a page.
// io.EOF from the chomp helpers is tolerated: it simply means the input
// ran out, which is legal for short or empty pages.
func ReadFrom(r io.Reader) (p Page, err error) {
	reader := bufio.NewReader(r)

	// chomp BOM and assume UTF-8
	if err = chompBOM(reader); err != nil && err != io.EOF {
		return
	}
	// Leading whitespace and an optional leading HTML comment are not part
	// of the page proper.
	if err = chompWhitespace(reader); err != nil && err != io.EOF {
		return
	}
	if err = chompFrontmatterStartComment(reader); err != nil && err != io.EOF {
		return
	}

	// Peek (without consuming) at the first bytes to classify the page.
	firstLine, err := peekLine(reader)
	if err != nil && err != io.EOF {
		return
	}

	newp := new(page)
	newp.render = shouldRender(firstLine)

	// Only renderable pages that open with a known delimiter carry
	// frontmatter; extract it before reading the remaining content.
	if newp.render && isFrontMatterDelim(firstLine) {
		left, right := determineDelims(firstLine)
		fm, err := extractFrontMatterDelims(reader, left, right)
		if err != nil {
			return nil, err
		}
		newp.frontmatter = fm
	}

	// Everything left in the reader is the page body.
	content, err := extractContent(reader)
	if err != nil {
		return nil, err
	}

	newp.content = content

	return newp, nil
}
|
||||||
|
|
||||||
|
// chompBOM scans any leading Unicode Byte Order Markers from r.
|
||||||
|
func chompBOM(r io.RuneScanner) (err error) {
|
||||||
|
for {
|
||||||
|
c, _, err := r.ReadRune()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if c != BOM {
|
||||||
|
r.UnreadRune()
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// chompWhitespace scans any leading Unicode whitespace from r.
|
||||||
|
func chompWhitespace(r io.RuneScanner) (err error) {
|
||||||
|
for {
|
||||||
|
c, _, err := r.ReadRune()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if !unicode.IsSpace(c) {
|
||||||
|
r.UnreadRune()
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// chompFrontmatterStartComment checks r for a leading HTML comment. If a
|
||||||
|
// comment is found, it is read from r and then whitespace is trimmed from the
|
||||||
|
// beginning of r.
|
||||||
|
func chompFrontmatterStartComment(r *bufio.Reader) (err error) {
|
||||||
|
candidate, err := r.Peek(32)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
str := string(candidate)
|
||||||
|
if strings.HasPrefix(str, HTMLCommentStart) {
|
||||||
|
lineEnd := strings.IndexAny(str, "\n")
|
||||||
|
if lineEnd == -1 {
|
||||||
|
//TODO: if we can't find it, Peek more?
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
testStr := strings.TrimSuffix(str[0:lineEnd], "\r")
|
||||||
|
if strings.Contains(testStr, HTMLCommentEnd) {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
buf := make([]byte, lineEnd)
|
||||||
|
if _, err = r.Read(buf); err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if err = chompWhitespace(r); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// chompFrontmatterEndComment checks r for a trailing HTML comment.
|
||||||
|
func chompFrontmatterEndComment(r *bufio.Reader) (err error) {
|
||||||
|
candidate, err := r.Peek(32)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
str := string(candidate)
|
||||||
|
lineEnd := strings.IndexAny(str, "\n")
|
||||||
|
if lineEnd == -1 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
testStr := strings.TrimSuffix(str[0:lineEnd], "\r")
|
||||||
|
if strings.Contains(testStr, HTMLCommentStart) {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
//TODO: if we can't find it, Peek more?
|
||||||
|
if strings.HasSuffix(testStr, HTMLCommentEnd) {
|
||||||
|
buf := make([]byte, lineEnd)
|
||||||
|
if _, err = r.Read(buf); err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if err = chompWhitespace(r); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func peekLine(r *bufio.Reader) (line []byte, err error) {
|
||||||
|
firstFive, err := r.Peek(5)
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
idx := bytes.IndexByte(firstFive, '\n')
|
||||||
|
if idx == -1 {
|
||||||
|
return firstFive, nil
|
||||||
|
}
|
||||||
|
idx++ // include newline.
|
||||||
|
return firstFive[:idx], nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func shouldRender(lead []byte) (frontmatter bool) {
|
||||||
|
if len(lead) <= 0 {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if bytes.Equal(lead[:1], []byte(HTMLLead)) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// isFrontMatterDelim reports whether data opens with one of the recognized
// front matter delimiters (see the delims regexp above).
func isFrontMatterDelim(data []byte) bool {
	return delims.Match(data)
}
|
||||||
|
|
||||||
|
func determineDelims(firstLine []byte) (left, right []byte) {
|
||||||
|
switch firstLine[0] {
|
||||||
|
case YAMLLead[0]:
|
||||||
|
return []byte(YAMLDelim), []byte(YAMLDelim)
|
||||||
|
case TOMLLead[0]:
|
||||||
|
return []byte(TOMLDelim), []byte(TOMLDelim)
|
||||||
|
case JSONLead[0]:
|
||||||
|
return []byte(JSONLead), []byte("}")
|
||||||
|
default:
|
||||||
|
panic(fmt.Sprintf("Unable to determine delims from %q", firstLine))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// extractFrontMatterDelims takes a frontmatter from the content bufio.Reader.
// Beginning white spaces of the bufio.Reader must be trimmed before call this
// function. The returned slice includes the delimiters themselves.
func extractFrontMatterDelims(r *bufio.Reader, left, right []byte) (fm []byte, err error) {
	var (
		c           byte
		buf         bytes.Buffer // accumulates the frontmatter, delimiters included
		level       int          // delimiter nesting: toggled 0/1 for YAML/TOML, brace depth for JSON
		sameDelim   = bytes.Equal(left, right) // true for YAML/TOML, false for JSON
		inQuote     bool // inside a double-quoted string (JSON), so braces there are ignored
		escapeState int  // nonzero right after a backslash, so \" does not toggle inQuote
	)
	// Frontmatter must start with a delimiter. To check it first,
	// pre-reads beginning delimiter length - 1 bytes from Reader
	for i := 0; i < len(left)-1; i++ {
		if c, err = r.ReadByte(); err != nil {
			return nil, fmt.Errorf("unable to read frontmatter at filepos %d: %s\n%.100s...", buf.Len(), err, buf.String())
		}
		if err = buf.WriteByte(c); err != nil {
			return nil, err
		}
	}

	// Reads a character from Reader one by one and checks it matches the
	// last character of one of delimiters to find the last character of
	// frontmatter. If it matches, makes sure it contains the delimiter
	// and if so, also checks it is followed by CR+LF or LF when YAML,
	// TOML case. In JSON case, nested delimiters must be parsed and it
	// is expected that the delimiter only contains one character.
	for {
		if c, err = r.ReadByte(); err != nil {
			return nil, fmt.Errorf("unable to read frontmatter at filepos %d: %s\n%.100s...", buf.Len(), err, buf.String())
		}
		if err = buf.WriteByte(c); err != nil {
			return nil, err
		}

		switch c {
		case '"':
			if escapeState != 1 {
				inQuote = !inQuote
			}
		case '\\':
			escapeState++
		case left[len(left)-1]:
			if sameDelim { // YAML, TOML case
				// A delimiter only counts when the buffer now ends with it
				// and it sits at the start of the input or of a line.
				if bytes.HasSuffix(buf.Bytes(), left) && (buf.Len() == len(left) || buf.Bytes()[buf.Len()-len(left)-1] == '\n') {
				nextByte:
					c, err = r.ReadByte()
					if err != nil {
						// It is ok that the end delimiter ends with EOF
						if err != io.EOF || level != 1 {
							return nil, fmt.Errorf("unable to read frontmatter at filepos %d: %s\n%.100s...", buf.Len(), err, buf.String())
						}
					} else {
						switch c {
						case '\n':
							// ok
						case ' ':
							// Consume this byte and try to match again
							goto nextByte
						case '\r':
							if err = buf.WriteByte(c); err != nil {
								return nil, err
							}
							// CR must be followed by LF.
							if c, err = r.ReadByte(); err != nil {
								return nil, fmt.Errorf("unable to read frontmatter at filepos %d: %s\n%.100s...", buf.Len(), err, buf.String())
							}
							if c != '\n' {
								return nil, fmt.Errorf("frontmatter delimiter must be followed by CR+LF or LF but those can't be found at filepos %d", buf.Len())
							}
						default:
							return nil, fmt.Errorf("frontmatter delimiter must be followed by CR+LF or LF but those can't be found at filepos %d", buf.Len())
						}
						if err = buf.WriteByte(c); err != nil {
							return nil, err
						}
					}
					// Toggle: first delimiter opens (level 1), second closes (level 0).
					if level == 0 {
						level = 1
					} else {
						level = 0
					}
				}
			} else { // JSON case
				if !inQuote {
					level++
				}
			}
		case right[len(right)-1]: // JSON case only reaches here
			if !inQuote {
				level--
			}
		}

		if level == 0 {
			// Consumes white spaces immediately behind frontmatter
			if err = chompWhitespace(r); err != nil && err != io.EOF {
				return nil, err
			}
			if err = chompFrontmatterEndComment(r); err != nil && err != io.EOF {
				return nil, err
			}

			return buf.Bytes(), nil
		}

		// Any byte other than a backslash ends an escape sequence.
		if c != '\\' {
			escapeState = 0
		}
	}
}
|
||||||
|
|
||||||
|
func extractContent(r io.Reader) (content []byte, err error) {
|
||||||
|
wr := new(bytes.Buffer)
|
||||||
|
if _, err = wr.ReadFrom(r); err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
return wr.Bytes(), nil
|
||||||
|
}
|
187
hugov0492/permalinks.go
Normal file
187
hugov0492/permalinks.go
Normal file
|
@ -0,0 +1,187 @@
|
||||||
|
package hugov0492
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"net/url"
|
||||||
|
"regexp"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Content holds the page metadata consulted when expanding a permalink
// pattern, plus the file-derived location fields.
type Content struct {
	Title      string    // page title; source for the :title attribute
	Slug       string    // optional slug; preferred over Title by the :slug attribute
	Summary    string    // page summary; not read by the permalink code in this file
	Categories []string  // not read by the permalink code in this file
	Tags       []string  // not read by the permalink code in this file
	Date       time.Time // publication date; source for all date-based attributes

	Filepath  string // source file path; source for the :filename attribute
	Subdir    string // section subdirectory; source for the :section attribute
	Permalink string // NOTE(review): never assigned in this file — presumably the expanded link; confirm with callers
}
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
knownPermalinkAttributes = map[string]pageToPermaAttribute{
|
||||||
|
"year": pageToPermalinkDate,
|
||||||
|
"month": pageToPermalinkDate,
|
||||||
|
"monthname": pageToPermalinkDate,
|
||||||
|
"day": pageToPermalinkDate,
|
||||||
|
"weekday": pageToPermalinkDate,
|
||||||
|
"weekdayname": pageToPermalinkDate,
|
||||||
|
"yearday": pageToPermalinkDate,
|
||||||
|
"section": pageToPermalinkSection,
|
||||||
|
"title": pageToPermalinkTitle,
|
||||||
|
"slug": pageToPermalinkSlugElseTitle,
|
||||||
|
"filename": pageToPermalinkFilename,
|
||||||
|
}
|
||||||
|
|
||||||
|
attributeRegexp = regexp.MustCompile(`:\w+`)
|
||||||
|
}
|
||||||
|
|
||||||
|
// pageToPermaAttribute is the type of a function which, given a page and a
// tag (the attribute name without the leading ':'), can return a string to
// go in that position in the page (or an error).
type pageToPermaAttribute func(*Content, string) (string, error)

// PathPattern represents a string which builds up a URL from attributes,
// e.g. "/:year/:month/:title/".
type PathPattern string

// knownPermalinkAttributes maps :tags in a permalink specification to a
// function which, given a page and the tag, returns the resulting string
// to be used to replace that tag. Populated in init.
var knownPermalinkAttributes map[string]pageToPermaAttribute

// attributeRegexp matches ':'-prefixed attribute tokens such as ":year".
// Compiled once in init.
var attributeRegexp *regexp.Regexp
|
||||||
|
|
||||||
|
// validate determines if a PathPattern is well-formed
|
||||||
|
func (pp PathPattern) validate() bool {
|
||||||
|
fragments := strings.Split(string(pp[1:]), "/")
|
||||||
|
var bail = false
|
||||||
|
for i := range fragments {
|
||||||
|
if bail {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
if len(fragments[i]) == 0 {
|
||||||
|
bail = true
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
matches := attributeRegexp.FindAllStringSubmatch(fragments[i], -1)
|
||||||
|
if matches == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, match := range matches {
|
||||||
|
k := strings.ToLower(match[0][1:])
|
||||||
|
if _, ok := knownPermalinkAttributes[k]; !ok {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Expand on a PathPattern takes a Content and returns the fully expanded Permalink
|
||||||
|
// or an error explaining the failure.
|
||||||
|
func (pp PathPattern) Expand(p *Content) (string, error) {
|
||||||
|
if !pp.validate() {
|
||||||
|
return "", fmt.Errorf("error")
|
||||||
|
}
|
||||||
|
sections := strings.Split(string(pp), "/")
|
||||||
|
for i, field := range sections {
|
||||||
|
if len(field) == 0 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
matches := attributeRegexp.FindAllStringSubmatch(field, -1)
|
||||||
|
|
||||||
|
if matches == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
newField := field
|
||||||
|
|
||||||
|
for _, match := range matches {
|
||||||
|
attr := match[0][1:]
|
||||||
|
callback, ok := knownPermalinkAttributes[attr]
|
||||||
|
|
||||||
|
if !ok {
|
||||||
|
return "", fmt.Errorf("err2")
|
||||||
|
}
|
||||||
|
|
||||||
|
newAttr, err := callback(p, attr)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return "", fmt.Errorf("err3 %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
newField = strings.Replace(newField, match[0], newAttr, 1)
|
||||||
|
}
|
||||||
|
|
||||||
|
sections[i] = newField
|
||||||
|
}
|
||||||
|
return strings.Join(sections, "/"), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func pageToPermalinkDate(p *Content, dateField string) (string, error) {
|
||||||
|
// a Content contains a Node which provides a field Date, time.Time
|
||||||
|
switch dateField {
|
||||||
|
case "year":
|
||||||
|
return strconv.Itoa(p.Date.Year()), nil
|
||||||
|
case "month":
|
||||||
|
return fmt.Sprintf("%02d", int(p.Date.Month())), nil
|
||||||
|
case "monthname":
|
||||||
|
return p.Date.Month().String(), nil
|
||||||
|
case "day":
|
||||||
|
return fmt.Sprintf("%02d", p.Date.Day()), nil
|
||||||
|
case "weekday":
|
||||||
|
return strconv.Itoa(int(p.Date.Weekday())), nil
|
||||||
|
case "weekdayname":
|
||||||
|
return p.Date.Weekday().String(), nil
|
||||||
|
case "yearday":
|
||||||
|
return strconv.Itoa(p.Date.YearDay()), nil
|
||||||
|
}
|
||||||
|
//TODO: support classic strftime escapes too
|
||||||
|
// (and pass those through despite not being in the map)
|
||||||
|
panic("coding error: should not be here")
|
||||||
|
}
|
||||||
|
|
||||||
|
// if the page has a slug, return the slug, else return the title
|
||||||
|
func pageToPermalinkSlugElseTitle(p *Content, a string) (string, error) {
|
||||||
|
if p.Slug != "" {
|
||||||
|
// Don't start or end with a -
|
||||||
|
// TODO(bep) this doesn't look good... Set the Slug once.
|
||||||
|
if strings.HasPrefix(p.Slug, "-") {
|
||||||
|
p.Slug = p.Slug[1:len(p.Slug)]
|
||||||
|
}
|
||||||
|
|
||||||
|
if strings.HasSuffix(p.Slug, "-") {
|
||||||
|
p.Slug = p.Slug[0 : len(p.Slug)-1]
|
||||||
|
}
|
||||||
|
return URLEscape(p.Slug)
|
||||||
|
}
|
||||||
|
return pageToPermalinkTitle(p, a)
|
||||||
|
}
|
||||||
|
|
||||||
|
// pageToPermalinkFilename returns the URL-safe form of the filename
|
||||||
|
func pageToPermalinkFilename(p *Content, _ string) (string, error) {
|
||||||
|
return URLEscape(p.Filepath)
|
||||||
|
}
|
||||||
|
|
||||||
|
func pageToPermalinkTitle(p *Content, _ string) (string, error) {
|
||||||
|
return URLEscape(p.Title)
|
||||||
|
}
|
||||||
|
|
||||||
|
func pageToPermalinkSection(p *Content, _ string) (string, error) {
|
||||||
|
return URLEscape(p.Subdir)
|
||||||
|
}
|
||||||
|
|
||||||
|
// URLEscape parses uri as a URL and returns its re-serialized form, which
// escapes characters such as spaces; parse failures are returned unchanged.
func URLEscape(uri string) (string, error) {
	u, err := url.Parse(uri)
	if err != nil {
		return "", err
	}
	return u.String(), nil
}
|
Loading…
Reference in a new issue