Query source

kcoram/uplift
Jan-Lukas Else 2019-11-14 18:46:49 +01:00
parent 7795fa171d
commit 767683653a
7 changed files with 264 additions and 3 deletions


@@ -16,6 +16,8 @@ import (
type Entry struct {
content string
title string
date string
lastmod string
section string
tags []string
link string
@@ -24,7 +26,6 @@ type Entry struct {
replyTitle string
filename string
location string
mentions []string
token string
}
@@ -163,9 +164,41 @@ func generateRandomString(now time.Time, n int) string {
func WriteEntry(entry *Entry) (string, error) {
file := WriteHugoPost(entry)
err := CommitEntry(entry.filename, file, entry.title)
err := CreateFile(entry.filename, file, entry.title)
if err != nil {
return "", err
}
return entry.location, nil
}
func analyzeURL(url string) (filePath string, section string, slug string, err error) {
blogUrl, err := GetBlogURL()
if err != nil || !strings.HasPrefix(url, blogUrl) {
return
}
path := strings.TrimSuffix(strings.TrimPrefix(url, blogUrl), "/")
pathParts := strings.Split(path, "/")
filePath = "content/" + path + ".md"
section = pathParts[0]
slug = pathParts[len(pathParts)-1]
return
}
func ReadEntry(url string) (entry *Entry, err error) {
filePath, section, slug, err := analyzeURL(url)
if err != nil {
return
}
fileContent, err := ReadFile(filePath)
if err != nil {
return
}
entry, err = ReadHugoPost(fileContent)
if entry != nil {
entry.location = url
entry.filename = filePath
entry.section = section
entry.slug = slug
}
return
}
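For illustration (the domain and slug are placeholders, not taken from this repository): with a blog URL of https://example.com/, analyzeURL maps https://example.com/posts/my-post to filePath "content/posts/my-post.md", section "posts" and slug "my-post", and ReadEntry then loads and parses that file from the repository.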


@@ -5,11 +5,12 @@ import (
"encoding/base64"
"encoding/json"
"errors"
"io/ioutil"
"net/http"
"net/url"
)
func CommitEntry(path string, file string, name string) error {
func CreateFile(path string, file string, name string) error {
giteaEndpoint, err := GetGiteaEndpoint()
if err != nil {
return err
@@ -33,3 +34,39 @@ func CommitEntry(path string, file string, name string) error {
}
return nil
}
func ReadFile(path string) (fileContent string, err error) {
giteaEndpoint, err := GetGiteaEndpoint()
if err != nil {
return
}
giteaToken, err := GetGiteaToken()
if err != nil {
return
}
resp, err := http.Get(giteaEndpoint + url.QueryEscape(path) + "?access_token=" + giteaToken)
if err != nil || resp.StatusCode != 200 {
err = errors.New("failed to read file in repo")
return
}
defer resp.Body.Close()
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
err = errors.New("failed reading file in repo")
return
}
giteaResponseBody := struct {
Content string
}{}
err = json.Unmarshal(body, &giteaResponseBody)
if err != nil {
err = errors.New("failed parsing Gitea response")
return
}
decodedBytes, err := base64.StdEncoding.DecodeString(giteaResponseBody.Content)
if err != nil {
err = errors.New("failed decoding file content")
}
fileContent = string(decodedBytes)
return
}
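For context (the value is placeholder base64): ReadFile above assumes the configured Gitea contents endpoint answers with JSON along the lines of {"content": "dGl0bGU6IEhlbGxvCg=="}; only the content field is kept and base64-decoded, and everything else in the response is ignored.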

go.mod (2 changed lines)

@@ -1,3 +1,5 @@
module codeberg.org/jlelse/hugo-micropub
go 1.13
require gopkg.in/yaml.v2 v2.2.5

go.sum (3 changed lines)

@@ -0,0 +1,3 @@
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v2 v2.2.5 h1:ymVxjfMaHvXD8RqPRmzHHsB3VvucivSkIAvJFDI5O3c=
gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=

main.go (14 changed lines)

@@ -3,6 +3,7 @@ package main
import (
"log"
"net/http"
"strconv"
"time"
)
@@ -14,6 +15,19 @@ func handleMicroPub(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(http.StatusOK)
_, _ = w.Write([]byte("[]"))
return
} else if url := r.URL.Query().Get("url"); q == "source" {
limit := r.URL.Query().Get("limit")
limitInt, err := strconv.Atoi(limit)
jsonBytes, err := QueryURL(url, limitInt)
if err != nil {
w.WriteHeader(http.StatusBadRequest)
_, _ = w.Write([]byte(err.Error()))
return
}
w.Header().Add("Content-type", "application/json")
w.WriteHeader(http.StatusOK)
_, _ = w.Write(jsonBytes)
return
} else {
w.Header().Add("Content-type", "application/json")
w.WriteHeader(http.StatusOK)
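With the q=source branch above in place, a Micropub source query such as GET /micropub?q=source&url=https://example.com/posts/my-post&limit=5 (the endpoint path and URL are placeholders) is handed to QueryURL from query.go below. An empty url falls back to the whole blog, and a missing or non-numeric limit leaves limitInt at 0, which QueryURL treats as no limit.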

post.go (60 changed lines)

@@ -2,6 +2,9 @@ package main
import (
"bytes"
"errors"
"gopkg.in/yaml.v2"
"strings"
"time"
)
@@ -39,3 +42,60 @@ func WriteHugoPost(entry *Entry) string {
}
return buff.String()
}
func readFrontMatter(frontmatter string, entry *Entry) (err error) {
parsedFrontmatter := &struct {
Title string
Date string
Lastmod string
Tags []string
ExternalURL string `yaml:"externalURL"`
Indieweb struct {
Reply struct {
Link string
Title string
}
}
}{}
err = yaml.Unmarshal([]byte(frontmatter), &parsedFrontmatter)
if err != nil {
err = errors.New("failed parsing frontmatter")
}
if len(parsedFrontmatter.Title) > 0 {
entry.title = parsedFrontmatter.Title
}
if len(parsedFrontmatter.Date) > 0 {
entry.date = parsedFrontmatter.Date
}
if len(parsedFrontmatter.Lastmod) > 0 {
entry.lastmod = parsedFrontmatter.Lastmod
}
if len(parsedFrontmatter.Tags) > 0 {
entry.tags = parsedFrontmatter.Tags
}
if len(parsedFrontmatter.ExternalURL) > 0 {
entry.link = parsedFrontmatter.ExternalURL
}
if len(parsedFrontmatter.Indieweb.Reply.Link) > 0 {
entry.replyLink = parsedFrontmatter.Indieweb.Reply.Link
}
if len(parsedFrontmatter.Indieweb.Reply.Title) > 0 {
entry.replyTitle = parsedFrontmatter.Indieweb.Reply.Title
}
return
}
func ReadHugoPost(fileContent string) (entry *Entry, err error) {
parts := strings.Split(fileContent, "---\n")
if len(parts) != 3 {
err = errors.New("empty frontmatter or content")
return
}
entry = new(Entry)
err = readFrontMatter(parts[1], entry)
if err != nil {
return
}
entry.content = strings.TrimSuffix(parts[2], "\n")
return
}
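As a reading aid (all values are placeholders), ReadHugoPost above expects a file that splits into exactly three parts on "---\n", i.e. a YAML front matter block followed by the post body:

---
title: Example post
date: 2019-11-14T18:00:00+01:00
tags:
- example
---
Hello world.

readFrontMatter copies the non-empty front matter fields into the Entry, and the body lands in entry.content with its trailing newline trimmed.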

query.go (new file, 112 changed lines)

@@ -0,0 +1,112 @@
package main
import (
"encoding/json"
"errors"
"io/ioutil"
"net/http"
)
type ItemList struct {
Items []*Item `json:"items"`
}
type Item struct {
Type []string `json:"type"`
Properties *Properties `json:"properties"`
}
type Properties struct {
Name []string `json:"name"`
Published []string `json:"published"`
Updated []string `json:"updated"`
Category []string `json:"category"`
Content []string `json:"content"`
Url []string `json:"url"`
}
func QueryURL(url string, limit int) ([]byte, error) {
blogURL, err := GetBlogURL()
if err != nil {
return nil, err
}
if len(url) == 0 {
url = blogURL
}
if url == blogURL {
allPosts, err := allPosts(url)
if err != nil {
return nil, err
}
itemList := &ItemList{}
for i, postURL := range allPosts {
if limit != 0 && i == limit {
break
}
item, _ := getItem(postURL)
itemList.Items = append(itemList.Items, item)
}
jsonBytes, err := json.Marshal(itemList)
if err != nil {
err = errors.New("failed to marshal json")
return nil, err
}
return jsonBytes, err
} else {
item, err := getItem(url)
if err != nil {
return nil, err
}
jsonBytes, err := json.Marshal(item)
if err != nil {
err = errors.New("failed to marshal json")
return nil, err
}
return jsonBytes, err
}
}
func getItem(url string) (item *Item, err error) {
entry, err := ReadEntry(url)
if err != nil {
return
}
item = &Item{
Type: []string{"h-entry"},
Properties: &Properties{
Name: []string{entry.title},
Published: []string{entry.date},
Updated: []string{entry.lastmod},
Category: entry.tags,
Content: []string{entry.content},
Url: []string{entry.location},
},
}
return
}
func allPosts(url string) ([]string, error) {
jsonFeed := &struct {
Items []struct {
Url string `json:"url"`
} `json:"items"`
}{}
resp, err := http.Get(url + "feed.json")
if err != nil {
return nil, errors.New("failed to get json feed")
}
defer resp.Body.Close()
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
return nil, errors.New("failed to read json feed")
}
err = json.Unmarshal(body, &jsonFeed)
if err != nil {
return nil, errors.New("failed to parse json feed")
}
var allUrls []string
for _, item := range jsonFeed.Items {
allUrls = append(allUrls, item.Url)
}
return allUrls, nil
}
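
A small, self-contained sketch (not part of this commit; names and values are placeholders) of the microformats2 JSON that getItem and QueryURL above produce for a single post, using trimmed copies of the structs from query.go:

package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed copies of the query.go structs, just enough to show the output shape.
type Properties struct {
	Name      []string `json:"name"`
	Published []string `json:"published"`
	Content   []string `json:"content"`
	Url       []string `json:"url"`
}

type Item struct {
	Type       []string    `json:"type"`
	Properties *Properties `json:"properties"`
}

func main() {
	// Placeholder values standing in for a parsed Entry.
	item := &Item{
		Type: []string{"h-entry"},
		Properties: &Properties{
			Name:      []string{"Example post"},
			Published: []string{"2019-11-14T18:46:49+01:00"},
			Content:   []string{"Hello world."},
			Url:       []string{"https://example.com/posts/example"},
		},
	}
	jsonBytes, _ := json.Marshal(item)
	fmt.Println(string(jsonBytes))
	// Output (trimmed): {"type":["h-entry"],"properties":{"name":["Example post"],...}}
}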