browsing profiles and notes.
This commit is contained in:
parent
3a09006a57
commit
4d77ece1df
@@ -27,6 +27,9 @@ type Event struct {
	Ref     string `db:"ref" json:"ref"` // the id of another event, optional
	Content string `db:"content" json:"content"`
	Sig     string `db:"sig" json:"sig"`

	// extra
	Rel int `db:"rel" json:"rel,omitempty"`
}

// Serialize outputs a byte array that can be hashed/signed to identify/authenticate
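The new Rel field has no column of its own in the event table; the handlers in this commit fill it with a correlated subquery that counts the events referencing a given event. A minimal sketch of that pattern, assuming the package-level sqlx handle `db` and the `Event` type used elsewhere in this package (the function name is illustrative, not part of the commit):

// eventsWithRelCount is a sketch only: it mirrors the correlated subquery
// the handlers below use to populate Event.Rel when selecting rows.
func eventsWithRelCount(pubkey string) ([]Event, error) {
	var evts []Event
	err := db.Select(&evts, `
        SELECT *, (SELECT count(*) FROM event AS r WHERE r.ref = event.id) AS rel
        FROM event
        WHERE pubkey = $1
        ORDER BY created_at DESC
    `, pubkey)
	return evts, err
}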
@@ -0,0 +1,26 @@
package main

import (
	"encoding/json"
	"net/http"
)

func queryUsers(w http.ResponseWriter, r *http.Request) {
	w.Header().Set("content-type", "application/json")

	keys := r.URL.Query()["keys"]
	found := make(map[string]int, len(keys))
	for _, key := range keys {
		var exists bool
		err := db.Get(&exists, `SELECT true FROM event WHERE pubkey = $1`, key)
		if err != nil {
			w.WriteHeader(500)
			log.Warn().Err(err).Str("key", key).Msg("failed to check existence")
			return
		}
		if exists {
			found[key] = 1
		}
	}
	json.NewEncoder(w).Encode(found)
}
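For reference, a small standalone client shows what this endpoint returns: a JSON object mapping each pubkey that has at least one stored event to 1. The base URL and the example keys below are assumptions, not part of the commit.

// Sketch of a client for the queryUsers handler above (hypothetical URL and keys).
package main

import (
	"encoding/json"
	"fmt"
	"net/http"
	"net/url"
)

func main() {
	qs := url.Values{"keys": {"02aa...", "03bb..."}} // placeholder pubkeys
	resp, err := http.Get("http://localhost:8080/query_users?" + qs.Encode())
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	found := map[string]int{}
	json.NewDecoder(resp.Body).Decode(&found)
	fmt.Println(found) // e.g. map[02aa...:1]
}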
@@ -2,144 +2,17 @@ package main

import (
	"crypto/sha256"
	"database/sql"
	"encoding/hex"
	"encoding/json"
	"errors"
	"fmt"
	"net/http"
	"strings"
	"sync"
	"time"

	"gopkg.in/antage/eventsource.v1"
)

type ErrorResponse struct {
	Error error `json:"error"`
}

func queryUsers(w http.ResponseWriter, r *http.Request) {
	w.Header().Set("content-type", "application/json")

	keys := r.URL.Query()["keys"]
	found := make(map[string]int, len(keys))
	for _, key := range keys {
		var exists bool
		err := db.Get(&exists, `SELECT true FROM event WHERE pubkey = $1`, key)
		if err != nil {
			w.WriteHeader(500)
			log.Warn().Err(err).Str("key", key).Msg("failed to check existence")
			return
		}
		if exists {
			found[key] = 1
		}
	}
	json.NewEncoder(w).Encode(found)
}

var sessions = make(map[string]*eventsource.EventSource)
var slock = sync.Mutex{}

func listenUpdates(w http.ResponseWriter, r *http.Request) {
	w.Header().Set("content-type", "application/json")
	var es eventsource.EventSource

	session := r.URL.Query().Get("session")
	if session != "" {
		// if a session id was given, try to recover/save the es object
		slock.Lock()
		preves, ok := sessions[session]
		slock.Unlock()
		if ok {
			// end it here, just serve again the existing object
			es = *preves
			es.ServeHTTP(w, r)
			return
		} else {
			// proceed, but save the es object at the end
			defer func() {
				slock.Lock()
				defer slock.Unlock()
				sessions[session] = &es
			}()
		}
	}

	// will return past items then track changes from these keys:
	keys, _ := r.URL.Query()["key"]

	es = eventsource.New(
		&eventsource.Settings{
			Timeout:        time.Second * 5,
			CloseOnTimeout: true,
			IdleTimeout:    time.Minute * 5,
			Gzip:           true,
		},
		func(r *http.Request) [][]byte {
			return [][]byte{
				[]byte("X-Accel-Buffering: no"),
				[]byte("Cache-Control: no-cache"),
				[]byte("Content-Type: text/event-stream"),
				[]byte("Connection: keep-alive"),
				[]byte("Access-Control-Allow-Origin: *"),
			}
		},
	)

	go func() {
		time.Sleep(2 * time.Second)
		es.SendRetryMessage(3 * time.Second)
	}()

	go func() {
		for {
			time.Sleep(25 * time.Second)
			if es.ConsumersCount() == 0 {
				removeFromWatchers(&es)
				es.Close()
				return
			}
			es.SendEventMessage("", "keepalive", "")
		}
	}()

	es.ServeHTTP(w, r)

	// past events
	inkeys := make([]string, 0, len(keys))
	for _, key := range keys {
		// to prevent sql attack here we will check if these keys are valid 33-byte hex
		parsed, err := hex.DecodeString(key)
		if err != nil || len(parsed) != 33 {
			continue
		}
		inkeys = append(inkeys, fmt.Sprintf("'%x'", parsed))
	}
	var lastUpdates []Event
	err := db.Select(&lastUpdates, `
        SELECT *
        FROM event
        WHERE pubkey IN (`+strings.Join(inkeys, ",")+`)
          AND created_at > $1
        ORDER BY created_at DESC
    `, time.Now().AddDate(0, 0, -5).Unix())
	if err != nil && err != sql.ErrNoRows {
		w.WriteHeader(500)
		log.Warn().Err(err).Interface("keys", keys).Msg("failed to fetch updates")
		return
	}

	for _, evt := range lastUpdates {
		jevent, _ := json.Marshal(evt)
		es.SendEventMessage(string(jevent), "event", "")
	}

	// listen to new events
	watchPubKeys(keys, &es)
}

func saveUpdate(w http.ResponseWriter, r *http.Request) {
	w.Header().Set("content-type", "application/json")
@@ -201,3 +74,87 @@ func saveUpdate(w http.ResponseWriter, r *http.Request) {

	notifyPubKeyEvent(evt.PubKey, &evt)
}

func requestUser(w http.ResponseWriter, r *http.Request) {
	es := grabNamedSession(r.URL.Query().Get("session"))
	if es == nil {
		w.WriteHeader(400)
		return
	}

	var pubkey struct {
		PubKey string `json:"pubkey"`
	}
	json.NewDecoder(r.Body).Decode(&pubkey)
	if pubkey.PubKey == "" {
		w.WriteHeader(400)
		return
	}

	var lastUpdates []Event
	if err := db.Select(&lastUpdates, `
        SELECT *, (SELECT count(*) FROM event AS r WHERE r.ref = event.id) AS rel
        FROM event
        WHERE pubkey = $1
        ORDER BY created_at DESC
        LIMIT 30
    `, pubkey.PubKey); err == nil {
		for _, evt := range lastUpdates {
			jevent, _ := json.Marshal(evt)
			(*es).SendEventMessage(string(jevent), "requested", "")
		}
	}
}

func requestNote(w http.ResponseWriter, r *http.Request) {
	es := grabNamedSession(r.URL.Query().Get("session"))
	if es == nil {
		w.WriteHeader(400)
		return
	}

	var id struct {
		Id string `json:"id"`
	}
	json.NewDecoder(r.Body).Decode(&id)
	if id.Id == "" {
		w.WriteHeader(400)
		return
	}

	go func() {
		var evt Event
		if err := db.Get(&evt, `
            SELECT * FROM event WHERE id = $1
        `, id.Id); err == nil {
			jevent, _ := json.Marshal(evt)
			(*es).SendEventMessage(string(jevent), "requested", "")
		}

		if evt.Ref == "" {
			return
		}

		var ref Event
		if err := db.Get(&ref, `
            SELECT * FROM event WHERE id = $1
        `, evt.Ref); err == nil {
			jevent, _ := json.Marshal(ref)
			(*es).SendEventMessage(string(jevent), "requested", "")
		}
	}()

	go func() {
		var related []Event
		if err := db.Select(&related, `
            SELECT * FROM event WHERE ref = $1
            -- UNION ALL
            -- SELECT * FROM event WHERE ref IN (SELECT ref FROM event WHERE ref = $1)
        `, id.Id); err == nil {
			for _, evt := range related {
				jevent, _ := json.Marshal(evt)
				(*es).SendEventMessage(string(jevent), "requested", "")
			}
		}
	}()
}
@@ -0,0 +1,185 @@
package main

import (
	"database/sql"
	"encoding/hex"
	"encoding/json"
	"fmt"
	"net/http"
	"strings"
	"sync"
	"time"

	"gopkg.in/antage/eventsource.v1"
)

var sessions = make(map[string]*eventsource.EventSource)
var backsessions = make(map[*eventsource.EventSource]string)
var slock = sync.Mutex{}

func listenUpdates(w http.ResponseWriter, r *http.Request) {
	w.Header().Set("content-type", "application/json")
	var es eventsource.EventSource

	session := r.URL.Query().Get("session")
	if session != "" {
		// if a session id was given, try to recover/save the es object
		slock.Lock()
		preves, ok := sessions[session]
		slock.Unlock()
		if ok {
			// end it here, just serve again the existing object
			es = *preves
			es.ServeHTTP(w, r)
			return
		} else {
			// proceed, but save the es object at the end
			defer func() {
				slock.Lock()
				defer slock.Unlock()
				sessions[session] = &es
				backsessions[&es] = session
			}()
		}
	}

	// will return past items then track changes from these keys:
	keys, _ := r.URL.Query()["key"]

	es = eventsource.New(
		&eventsource.Settings{
			Timeout:        time.Second * 5,
			CloseOnTimeout: true,
			IdleTimeout:    time.Minute * 5,
			Gzip:           true,
		},
		func(r *http.Request) [][]byte {
			return [][]byte{
				[]byte("X-Accel-Buffering: no"),
				[]byte("Cache-Control: no-cache"),
				[]byte("Content-Type: text/event-stream"),
				[]byte("Connection: keep-alive"),
				[]byte("Access-Control-Allow-Origin: *"),
			}
		},
	)

	go func() {
		time.Sleep(2 * time.Second)
		es.SendRetryMessage(3 * time.Second)
	}()

	go func() {
		for {
			time.Sleep(25 * time.Second)
			if es.ConsumersCount() == 0 {
				removeFromWatchers(&es)
				removeFromSessions(&es)
				es.Close()
				return
			}
			es.SendEventMessage("", "keepalive", "")
		}
	}()

	es.ServeHTTP(w, r)

	// past events
	inkeys := make([]string, 0, len(keys))
	for _, key := range keys {
		// to prevent sql attack here we will check if these keys are valid 33-byte hex
		parsed, err := hex.DecodeString(key)
		if err != nil || len(parsed) != 33 {
			continue
		}
		inkeys = append(inkeys, fmt.Sprintf("'%x'", parsed))
	}
	var lastUpdates []Event
	err := db.Select(&lastUpdates, `
        SELECT *, (SELECT count(*) FROM event AS r WHERE r.ref = event.id) AS rel
        FROM event
        WHERE pubkey IN (`+strings.Join(inkeys, ",")+`)
        ORDER BY created_at DESC
        LIMIT 50
    `)
	if err != nil && err != sql.ErrNoRows {
		w.WriteHeader(500)
		log.Warn().Err(err).Interface("keys", keys).Msg("failed to fetch updates")
		return
	}

	for _, evt := range lastUpdates {
		jevent, _ := json.Marshal(evt)
		es.SendEventMessage(string(jevent), "history", "")
	}

	// listen to new events
	watchPubKeys(keys, &es)
}

var watchers = make(map[string][]*eventsource.EventSource)
var index = make(map[*eventsource.EventSource][]string)
var wlock = sync.Mutex{}

func watchPubKeys(keys []string, es *eventsource.EventSource) {
	wlock.Lock()
	defer wlock.Unlock()

	index[es] = keys

	for _, key := range keys {
		if arr, ok := watchers[key]; ok {
			watchers[key] = append(arr, es)
		} else {
			watchers[key] = []*eventsource.EventSource{es}
		}
	}
}

func removeFromWatchers(es *eventsource.EventSource) {
	wlock.Lock()
	defer wlock.Unlock()

	for _, key := range index[es] {
		if arr, ok := watchers[key]; ok {
			newarr := make([]*eventsource.EventSource, len(arr)-1)
			i := 0
			for _, oldes := range arr {
				if oldes == es {
					continue
				}
				newarr[i] = oldes
				i++
			}
			watchers[key] = newarr
		}
	}
	delete(index, es)
}

func notifyPubKeyEvent(key string, evt *Event) {
	wlock.Lock()
	defer wlock.Unlock()

	if arr, ok := watchers[key]; ok {
		for _, es := range arr {
			jevent, _ := json.Marshal(evt)
			(*es).SendEventMessage(string(jevent), "happening", "")
		}
	}
}

func grabNamedSession(name string) *eventsource.EventSource {
	slock.Lock()
	defer slock.Unlock()
	es, _ := sessions[name]
	return es
}

func removeFromSessions(es *eventsource.EventSource) {
	slock.Lock()
	defer slock.Unlock()

	session := backsessions[es]
	delete(backsessions, es)
	delete(sessions, session)
}
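The file above keeps two maps for the pubkey subscriptions: watchers (pubkey to event sources) gives cheap fan-out in notifyPubKeyEvent, and index (event source to pubkeys) lets removeFromWatchers unregister a dropped stream without scanning every pubkey. A self-contained sketch of the same pattern with plain values instead of eventsource objects (all names here are illustrative, and the wlock guarding the real maps is omitted):

package main

import "fmt"

type subscriber struct{ id string }

var (
	watchers = map[string][]*subscriber{} // pubkey -> interested subscribers
	index    = map[*subscriber][]string{} // subscriber -> pubkeys it watches
)

func watch(keys []string, s *subscriber) {
	index[s] = keys
	for _, k := range keys {
		watchers[k] = append(watchers[k], s)
	}
}

func unwatch(s *subscriber) {
	for _, k := range index[s] {
		arr := watchers[k]
		kept := arr[:0]
		for _, other := range arr {
			if other != s {
				kept = append(kept, other)
			}
		}
		watchers[k] = kept
	}
	delete(index, s)
}

func notify(key, msg string) {
	for _, s := range watchers[key] {
		fmt.Printf("-> %s gets %q\n", s.id, msg)
	}
}

func main() {
	a := &subscriber{id: "a"}
	watch([]string{"pubkey1", "pubkey2"}, a)
	notify("pubkey1", "new event")
	unwatch(a)
	notify("pubkey1", "nobody is watching anymore")
}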
@@ -40,6 +40,8 @@ func main() {
	router.Path("/query_users").Methods("GET").HandlerFunc(queryUsers)
	router.Path("/listen_updates").Methods("GET").HandlerFunc(listenUpdates)
	router.Path("/save_update").Methods("POST").HandlerFunc(saveUpdate)
	router.Path("/request_user").Methods("POST").HandlerFunc(requestUser)
	router.Path("/request_note").Methods("POST").HandlerFunc(requestNote)

	srv := &http.Server{
		Handler: cors.Default().Handler(router),
@@ -25,7 +25,8 @@ CREATE TABLE event (
);

CREATE UNIQUE INDEX ididx ON event (id);
CREATE INDEX pubkeytime ON event (pubkey, created_at);
CREATE INDEX pubkeytimeidx ON event (pubkey, created_at);
CREATE INDEX idxref ON event (ref);
	`)
	log.Print(err)
	return db, nil
@@ -25,7 +25,8 @@ CREATE TABLE event (
);

CREATE UNIQUE INDEX ididx ON event (id);
CREATE INDEX pubkeytime ON event (pubkey, created_at);
CREATE INDEX pubkeytimeidx ON event (pubkey, created_at);
CREATE INDEX idxref ON event (ref);
	`)
	return db, nil
}
@@ -1,59 +0,0 @@
package main

import (
	"encoding/json"
	"sync"

	"gopkg.in/antage/eventsource.v1"
)

var watchers = make(map[string][]*eventsource.EventSource)
var index = make(map[*eventsource.EventSource][]string)
var wlock = sync.Mutex{}

func watchPubKeys(keys []string, es *eventsource.EventSource) {
	wlock.Lock()
	defer wlock.Unlock()

	index[es] = keys

	for _, key := range keys {
		if arr, ok := watchers[key]; ok {
			watchers[key] = append(arr, es)
		} else {
			watchers[key] = []*eventsource.EventSource{es}
		}
	}
}

func removeFromWatchers(es *eventsource.EventSource) {
	wlock.Lock()
	defer wlock.Unlock()

	for _, key := range index[es] {
		if arr, ok := watchers[key]; ok {
			newarr := make([]*eventsource.EventSource, len(arr)-1)
			i := 0
			for _, oldes := range arr {
				if oldes == es {
					continue
				}
				newarr[i] = oldes
				i++
			}
		}
	}
	delete(index, es)
}

func notifyPubKeyEvent(key string, evt *Event) {
	wlock.Lock()
	defer wlock.Unlock()

	if arr, ok := watchers[key]; ok {
		for _, es := range arr {
			jevent, _ := json.Marshal(evt)
			(*es).SendEventMessage(string(jevent), "event", "")
		}
	}
}
@@ -4,9 +4,11 @@
    "dexie": "^3.0.2",
    "elliptic": "^6.5.3",
    "insort": "^0.4.0",
    "pretty-date": "^0.2.0",
    "quick-lru": "^5.1.1",
    "sha.js": "^2.4.11",
    "vue": "^3.0.2",
    "vue-router": "^4.0.0-rc.2",
    "vue-router": "^4.0.0-rc.3",
    "vuex": "^4.0.0-rc.1"
  },
  "devDependencies": {
@@ -15,7 +17,7 @@
    "@rollup/plugin-json": "^4.1.0",
    "@rollup/plugin-node-resolve": "^10.0.0",
    "@vue/compiler-sfc": "^3.0.2",
    "rollup": "^2.33.1",
    "rollup": "^2.33.2",
    "rollup-plugin-css-only": "^2.1.0",
    "rollup-plugin-inject-process-env": "^1.3.1",
    "rollup-plugin-terser": "^7.0.2",
@@ -29,7 +29,7 @@
}
</script>

<style>
<style scoped>
.nav {
  display: flex;
  align-items: center;
@@ -17,14 +17,10 @@
    <button :disabled="publishing">Publish</button>
  </form>
  <p>Data providers: {{ readServersList }}</p>
  <div v-if="$store.state.notes.size === 0">
  <div v-if="$store.state.home.size === 0">
    <p>Didn't find any notes to show.</p>
  </div>
  <div v-else>
    <div v-for="note in $store.state.notes.values()">
      <Note v-bind="note" :key="note.id" />
    </div>
  </div>
  <List v-else :notes="$store.state.home.values()" />
</template>

<script>
@@ -61,4 +57,4 @@
}
</script>

<style></style>
<style scoped></style>
@@ -0,0 +1,19 @@
<template>
  <section class="notes">
    <div v-for="note in notes">
      <Note :note="note" :key="note.id" />
    </div>
  </section>
</template>

<script>
export default {
  props: ['notes']
}
</script>

<style scoped>
.notes {
  background: whitesmoke;
}
</style>
@@ -1,19 +1,89 @@
<template>
  <article>
    <div>{{ pubkey }}</div>
    <div>{{ content }}</div>
    <div>{{ created_at }}</div>
    <div class="pubkey">
      <a :href="'#/' + pubkey">{{ pubkey }}</a>
    </div>
    <p>{{ content }}</p>
    <em>
      <a :href="'#/n/' + id"
        ><time :datetime="isoDate(created_at)" :title="isoDate(created_at)"
          >{{ humanDate(created_at) }}</time
        ></a
      >
    </em>
    <div v-if="reference" class="reference">
      <Note :note="reference" />
    </div>
  </article>
  <List v-if="isFullPage" :notes="related" />
</template>

<script>
import prettydate from 'pretty-date'

export default {
  props: ['id', 'content', 'pubkey', 'created_at', 'signature', 'reference']
  props: ['note'],
  computed: {
    isFullPage() {
      return !this.note
    },
    id() {
      return this.note ? this.note.id : this.$route.params.id
    },
    event() {
      return this.note || this.$store.state.browsing.get(this.id) || {}
    },
    created_at() {
      return this.event.created_at && new Date(this.event.created_at * 1000)
    },
    content() {
      return this.event.content
    },
    pubkey() {
      return this.event.pubkey
    },
    reference() {
      if (this.isFullPage && this.event.ref) {
        return this.$store.state.browsing.get(this.event.ref)
      }
    },
    related() {
      var rel = []
      if (this.isFullPage) {
        for (let k of this.$store.state.browsing.keys()) {
          if (k === 'rel:' + this.id) {
            rel.push(this.$store.state.browsing.get(k))
          }
        }
      }
      return rel
    }
  },
  methods: {
    isoDate(d) {
      return d && d.toISOString()
    },
    humanDate(d) {
      return d && prettydate.format(d)
    }
  },
  mounted() {
    if (!this.note) {
      this.$store.dispatch('browseNote', this.$route.params.id)
    }
  }
}
</script>

<style>
<style scoped>
article {
  margin: 10px 0;
}
p {
  margin: 0;
  padding-left: 20px;
}
.reference {
  background-color: yellow;
}
</style>
@@ -2,11 +2,17 @@
  <div>
    <h1>{{ $route.params.key }}</h1>
    <div v-if="following">
      <button @click="unfollow">Unfollow</button>
      <button
        @click="unfollow"
        :disabled="$route.params.key === $store.getters.pubKeyHex"
      >
        Unfollow
      </button>
    </div>
    <div v-else>
      <button @click="follow">Follow</button>
    </div>
    <List :notes="notes" />
  </div>
</template>
@@ -17,6 +23,17 @@
      return (
        this.$store.state.following.indexOf(this.$route.params.key) !== -1
      )
    },
    notes() {
      var notes = []
      for (let k of this.$store.state.browsing.keys()) {
        if (k === 'from:' + this.$route.params.key) {
          let note = this.$store.state.browsing.get(k)
          notes.push(note)
        }
      }
      notes.sort((a, b) => b.created_at - a.created_at)
      return notes
    }
  },
  methods: {
@@ -28,6 +45,9 @@
      e.preventDefault()
      this.$store.commit('unfollow', this.$route.params.key)
    }
  },
  mounted() {
    this.$store.dispatch('browseProfile', this.$route.params.key)
  }
}
</script>
@@ -0,0 +1,14 @@
import elliptic from 'elliptic'
import Dexie from 'dexie'

export const ec = new elliptic.ec('secp256k1')
export const db = new Dexie('db')

db.version(1).stores({
  settings: 'key', // as in key => value
  relays: 'host',
  following: 'pubkey',
  mynotes: 'id, kind, created_at',
  cachedmetadata: 'pubkey',
  cachednotes: 'id, pubkey, created_at'
})
@@ -1,7 +1,36 @@
import shajs from 'sha.js'

import {ec} from './globals'

export function verifySignature(evt) {
  return true // TODO
}

export function publishEvent(evt, key, hosts) {
  let hash = shajs('sha256').update(serializeEvent(evt)).digest()
  evt.id = hash.toString('hex')

  evt.sig = ec
    .keyFromPrivate(key, 'hex')
    .sign(hash, {canonical: true})
    .toDER('hex')

  for (let i = 0; i < hosts.length; i++) {
    let host = hosts[i]
    window
      .fetch(host + '/save_update', {
        method: 'POST',
        headers: {'content-type': 'application/json'},
        body: JSON.stringify(evt)
      })
      .then(r => {
        if (!r.ok) console.log(`failed to publish ${evt} to ${host}`)
      })
  }

  return evt
}

export function serializeEvent(evt) {
  let version = Buffer.alloc(1)
  version.writeUInt8(0)
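On the Go side, the id that publishEvent computes above is just the hex-encoded SHA-256 of the serialized event, matching the Serialize comment on the Event struct earlier in this commit. A standard-library-only sketch of that computation (the input bytes are a placeholder, since Serialize's body is not shown in this diff):

package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
)

// computeID returns the hex SHA-256 of an already-serialized event.
func computeID(serialized []byte) string {
	h := sha256.Sum256(serialized)
	return hex.EncodeToString(h[:])
}

func main() {
	// placeholder input: a single 0x00 version byte, as serializeEvent starts with
	fmt.Println(computeID([]byte{0x00}))
}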
@@ -6,6 +6,7 @@ import Home from './Home.html'
import Setup from './Setup.html'
import Profile from './Profile.html'
import Note from './Note.html'
import List from './List.html'

import store from './store'

@@ -28,4 +29,5 @@ app.component('Home', Home)
app.component('Setup', Setup)
app.component('Profile', Profile)
app.component('Note', Note)
app.component('List', List)
app.mount('#app')
@@ -1,22 +1,9 @@
import {createStore, createLogger} from 'vuex'
import elliptic from 'elliptic'
import shajs from 'sha.js'
import Dexie from 'dexie'
import {SortedMap} from 'insort'
import LRU from 'quick-lru'

import {verifySignature, serializeEvent} from './helpers'

const ec = new elliptic.ec('secp256k1')
const db = new Dexie('db')

db.version(1).stores({
  settings: 'key', // as in key => value
  relays: 'host',
  following: 'pubkey',
  mynotes: 'id, kind, created_at',
  cachedmetadata: 'pubkey',
  cachednotes: 'id, pubkey, created_at'
})
import {verifySignature, publishEvent} from './helpers'
import {ec, db} from './globals'

export default createStore({
  plugins: (process.env.NODE_ENV !== 'production'
@@ -24,13 +11,29 @@ export default createStore({
      : []
  ).concat([init, listener]),
  state() {
    let relays = [
      {
        host: 'https://relay-profiles.bigsun.xyz',
        policy: 'rw'
      }
    ]
    db.relays.bulkPut(relays)

    let haveEventSource = new Promise(resolve => {
      setTimeout(() => {
        haveEventSource.resolve = resolve
      }, 1)
    })

    return {
      relays: {
        'https://relay-profiles.bigsun.xyz': 'rw'
      },
      haveEventSource,
      session: new Date().getTime() + '' + Math.round(Math.random() * 100000),
      relays,
      key: ec.genKeyPair().getPrivate('hex'),
      following: [],
      notes: new SortedMap([], (a, b) => b[1] - a[1])
      home: new SortedMap([], (a, b) => b[1] - a[1]),
      metadata: new LRU({maxSize: 100}),
      browsing: new LRU({maxSize: 300})
    }
  },
  getters: {
@@ -38,20 +41,24 @@
    pubKeyHex: state =>
      ec.keyFromPrivate(state.key, 'hex').getPublic(true, 'hex'),
    writeServers: state =>
      Object.keys(state.relays).filter(
        host => state.relays[host].indexOf('w') !== -1
      ),
      state.relays
        .filter(({policy}) => policy.indexOf('w') !== -1)
        .map(({host}) => host),
    readServers: state =>
      Object.keys(state.relays).filter(
        host => state.relays[host].indexOf('r') !== -1
      )
      state.relays
        .filter(({policy}) => policy.indexOf('r') !== -1)
        .map(({host}) => host)
  },
  mutations: {
    setInit(state, {relays, key, following, notes}) {
    setInit(state, {relays, key, following, home, metadata}) {
      state.relays = relays
      state.key = key
      state.following = following
      state.notes = notes
      state.home = home
      state.metadata = metadata
    },
    gotEventSource(state) {
      state.haveEventSource.resolve()
    },
    follow(state, key) {
      state.following.push(key)
@@ -61,7 +68,7 @@
      state.following.splice(state.following.indexOf(key), 1)
      db.following.delete(key)
    },
    receivedEvent(state, evt) {
    receivedEvent(state, {event: evt, context}) {
      if (!verifySignature(evt)) {
        console.log('received event with invalid signature', evt)
        return
@@ -69,49 +76,113 @@

      switch (evt.kind) {
        case 0: // setMetadata
          let meta = JSON.parse(evt.content)
          let storeable = {
            pubkey: evt.pubkey,
            time: evt.created_at,
            meta
          }

          if (context === 'requested') {
            // just someone we're viewing
            if (!state.metadata.has(evt.pubkey)) {
              state.metadata.set(evt.pubkey, meta)
            }
          } else if (context === 'happening') {
            // an update from someone we follow that happened just now
            state.metadata.set(evt.pubkey, meta)
            db.cachedmetadata.put(storeable)
          } else if (context === 'history') {
            // someone we follow, but an old update
            db.cachedmetadata.get(evt.pubkey).then(data => {
              if (data.time < storeable.time) {
                db.cachedmetadata.put(storeable)
              }
            })
          }
          break
        case 1: // textNote
          state.notes.set([evt.id, evt.created_at], evt)
          if (context === 'requested') {
            state.browsing.set(evt.id, evt)
            state.browsing.set('from:' + evt.pubkey, evt)
            if (evt.ref && evt.ref.length) {
              state.browsing.set('rel:' + evt.ref, evt)
            }
          } else {
            state.home.set([evt.id, evt.created_at], evt)
          }
          break
        case 2: // recommendServer
          let host = evt.content
          if (context === 'requested') {
            db.relays.put({
              host,
              policy: '',
              recommender: evt.pubkey
            })
          } else {
            db.relays.put({
              host,
              policy: 'r',
              recommender: evt.pubkey
            })
            state.relays.push({host, policy: 'r'})
          }
          break
      }
    }
  },
  actions: {
    publishNote(store, text) {
      text = text.trim()

      let evt = {
        pubkey: store.getters.pubKeyHex,
        created_at: Math.round(new Date().getTime() / 1000),
        kind: 1,
        content: text
    async browseProfile(store, pubkey) {
      await store.state.haveEventSource
      for (let i = 0; i < store.getters.readServers.length; i++) {
        let host = store.getters.readServers[i]
        window.fetch(host + '/request_user?session=' + store.state.session, {
          method: 'POST',
          headers: {'content-type': 'application/json'},
          body: JSON.stringify({pubkey})
        })
      }

      let hash = shajs('sha256').update(serializeEvent(evt)).digest()
      evt.id = hash.toString('hex')

      evt.sig = ec
        .keyFromPrivate(store.state.key, 'hex')
        .sign(hash, {canonical: true})
        .toDER('hex')

      for (let i = 0; i < store.getters.writeServers.length; i++) {
        let host = store.getters.writeServers[i]
        window
          .fetch(host + '/save_update', {
            method: 'POST',
            headers: {'content-type': 'application/json'},
            body: JSON.stringify(evt)
          })
          .then(r => {
            if (!r.ok) console.log(`failed to publish ${evt} to ${host}`)
          })
    },
    async browseNote(store, id) {
      await store.state.haveEventSource
      for (let i = 0; i < store.getters.readServers.length; i++) {
        let host = store.getters.readServers[i]
        window.fetch(host + '/request_note?session=' + store.state.session, {
          method: 'POST',
          headers: {'content-type': 'application/json'},
          body: JSON.stringify({id})
        })
      }
    },
    async publishMetadata(store, meta) {
      let evt = await publishEvent(
        {
          pubkey: store.getters.pubKeyHex,
          created_at: Math.round(new Date().getTime() / 1000),
          kind: 0,
          content: JSON.stringify(meta)
        },
        store.state.key,
        store.getters.writeServers
      )

      db.cachedmetadata.put({pubkey: evt.pubkey, time: evt.created_at, meta})
    },
    async publishNote(store, text) {
      let evt = await publishEvent(
        {
          pubkey: store.getters.pubKeyHex,
          created_at: Math.round(new Date().getTime() / 1000),
          kind: 1,
          content: text.trim()
        },
        store.state.key,
        store.getters.writeServers
      )

      db.mynotes.put(evt)
      store.commit('receivedEvent', evt)
      store.commit('receivedEvent', {event: evt, context: 'happening'})
    }
  }
})
@@ -133,15 +204,16 @@ async function init(store) {
      if (rls.length === 0) {
        return store.state.relays
      }

      var relays = {}
      rls.forEach(({host, policy}) => {
        relays[host] = policy
      })

      return relays
      return rls
    }),
    db.following.toArray().then(r => r.map(({pubkey}) => pubkey)),
    db.following.toArray().then(r =>
      r
        .map(({pubkey}) => pubkey)
        .concat(
          // always be following thyself
          store.getters.pubKeyHex
        )
    ),
    db.mynotes
      .orderBy('created_at')
      .reverse()
@@ -152,57 +224,80 @@ async function init(store) {
        notes.map(n => [[n.id, n.created_at], n]),
        (a, b) => b[1] - a[1]
      )
    }),
    db.cachedmetadata.toArray().then(metas => {
      var metadata = {}
      metas.forEach(({meta, pubkey}) => {
        metadata[pubkey] = meta
      })
      return metadata
    })
  ])

  store.commit('setInit', {
    key: data[0],
    relays: data[1],
    following: data[2],
    notes: data[3]
    home: data[3],
    metadata: data[4]
  })
}

function listener(store) {
  var ess = []
  var ess = new Map()

  store.subscribe(mutation => {
    if (
      mutation.type === 'setInit' ||
      mutation.type === 'changeRelay' ||
      mutation.type === 'follow' ||
      mutation.type === 'unfollow'
    ) {
      ess.forEach(es => {
        es.close()
      })
      startListening()
    }
  db.relays.hook('creating', host => {
    listenToRelay(host)
  })

  function startListening() {
  db.relays.hook('deleting', host => {
    let es = ess.get(host)
    es.close()
    ess.delete(host)
  })

  db.following.hook('creating', () => {
    restartListeners()
  })

  store.subscribe(mutation => {
    if (mutation.type === 'setInit') restartListeners()
  })

  function restartListeners() {
    for (let [host, es] of ess) {
      es.close()
      ess.delete(host)
    }
    store.getters.readServers.forEach(listenToRelay)
  }

  function listenToRelay(relayURL, i) {
  function listenToRelay(host) {
    if (store.state.following.length === 0) return

    let qs = store.state.following.map(key => `key=${key}`).join('&')
    let es = new EventSource(relayURL + '/listen_updates?' + qs)
    ess.push(es)
    let es = new EventSource(
      host + '/listen_updates?' + qs + '&session=' + store.state.session
    )
    ess.set(host, es)

    es.onerror = e => {
      console.log(`${relayURL}/listen_updates error: ${e.data}`)
      ess.splice(i, 1)
      console.log(`${host}/listen_updates error: ${e.data}`)
      ess.delete(host)
    }

    store.commit('gotEventSource')

    es.addEventListener('notice', e => {
      console.log(e.data)
    })

    es.addEventListener('event', e => {
      let evt = JSON.parse(e.data)
      store.commit('receivedEvent', evt)
    ;['history', 'happening', 'requested'].forEach(context => {
      es.addEventListener(context, e => {
        store.commit('receivedEvent', {
          event: JSON.parse(e.data),
          context
        })
      })
    })
  }
}