forked from mleku/next.orly.dev
Compare commits
15 Commits
| Author | SHA1 | Date |
|---|---|---|
|  | a5b6943320 |  |
|  | 1fe0a395be |  |
|  | 92b3716a61 |  |
|  | 5c05d741d9 |  |
|  | 9a1bbbafce |  |
|  | 2fd3828010 |  |
|  | 24b742bd20 |  |
|  | 6f71b95734 |  |
|  | 82665444f4 |  |
|  | effeae4495 |  |
|  | 6b38291bf9 |  |
|  | 0b69ea6d80 |  |
|  | 9c85dca598 |  |
|  | 0d8c518896 |  |
|  | 20fbce9263 |  |
.gitignore (vendored): 7 lines changed
@@ -92,6 +92,13 @@ cmd/benchmark/data
!strfry.conf
!config.toml
!.dockerignore
!*.jsx
!*.tsx
!app/web/dist
!/app/web/dist
!/app/web/dist/*
!/app/web/dist/**
!bun.lock
# ...even if they are in subdirectories
!*/
/blocklist.json
@@ -23,23 +23,29 @@ import (
// and default values. It defines parameters for app behaviour, storage
// locations, logging, and network settings used across the relay service.
type C struct {
AppName string `env:"ORLY_APP_NAME" usage:"set a name to display on information about the relay" default:"ORLY"`
DataDir string `env:"ORLY_DATA_DIR" usage:"storage location for the event store" default:"~/.local/share/ORLY"`
Listen string `env:"ORLY_LISTEN" default:"0.0.0.0" usage:"network listen address"`
Port int `env:"ORLY_PORT" default:"3334" usage:"port to listen on"`
HealthPort int `env:"ORLY_HEALTH_PORT" default:"0" usage:"optional health check HTTP port; 0 disables"`
EnableShutdown bool `env:"ORLY_ENABLE_SHUTDOWN" default:"false" usage:"if true, expose /shutdown on the health port to gracefully stop the process (for profiling)"`
LogLevel string `env:"ORLY_LOG_LEVEL" default:"info" usage:"relay log level: fatal error warn info debug trace"`
DBLogLevel string `env:"ORLY_DB_LOG_LEVEL" default:"info" usage:"database log level: fatal error warn info debug trace"`
LogToStdout bool `env:"ORLY_LOG_TO_STDOUT" default:"false" usage:"log to stdout instead of stderr"`
Pprof string `env:"ORLY_PPROF" usage:"enable pprof in modes: cpu,memory,allocation,heap,block,goroutine,threadcreate,mutex"`
PprofPath string `env:"ORLY_PPROF_PATH" usage:"optional directory to write pprof profiles into (inside container); default is temporary dir"`
PprofHTTP bool `env:"ORLY_PPROF_HTTP" default:"false" usage:"if true, expose net/http/pprof on port 6060"`
OpenPprofWeb bool `env:"ORLY_OPEN_PPROF_WEB" default:"false" usage:"if true, automatically open the pprof web viewer when profiling is enabled"`
IPWhitelist []string `env:"ORLY_IP_WHITELIST" usage:"comma-separated list of IP addresses to allow access from, matches on prefixes to allow private subnets, eg 10.0.0 = 10.0.0.0/8"`
Admins []string `env:"ORLY_ADMINS" usage:"comma-separated list of admin npubs"`
Owners []string `env:"ORLY_OWNERS" usage:"comma-separated list of owner npubs, who have full control of the relay for wipe and restart and other functions"`
ACLMode string `env:"ORLY_ACL_MODE" usage:"ACL mode: follows,none" default:"none"`
AppName string `env:"ORLY_APP_NAME" usage:"set a name to display on information about the relay" default:"ORLY"`
DataDir string `env:"ORLY_DATA_DIR" usage:"storage location for the event store" default:"~/.local/share/ORLY"`
Listen string `env:"ORLY_LISTEN" default:"0.0.0.0" usage:"network listen address"`
Port int `env:"ORLY_PORT" default:"3334" usage:"port to listen on"`
HealthPort int `env:"ORLY_HEALTH_PORT" default:"0" usage:"optional health check HTTP port; 0 disables"`
EnableShutdown bool `env:"ORLY_ENABLE_SHUTDOWN" default:"false" usage:"if true, expose /shutdown on the health port to gracefully stop the process (for profiling)"`
LogLevel string `env:"ORLY_LOG_LEVEL" default:"info" usage:"relay log level: fatal error warn info debug trace"`
DBLogLevel string `env:"ORLY_DB_LOG_LEVEL" default:"info" usage:"database log level: fatal error warn info debug trace"`
LogToStdout bool `env:"ORLY_LOG_TO_STDOUT" default:"false" usage:"log to stdout instead of stderr"`
Pprof string `env:"ORLY_PPROF" usage:"enable pprof in modes: cpu,memory,allocation,heap,block,goroutine,threadcreate,mutex"`
PprofPath string `env:"ORLY_PPROF_PATH" usage:"optional directory to write pprof profiles into (inside container); default is temporary dir"`
PprofHTTP bool `env:"ORLY_PPROF_HTTP" default:"false" usage:"if true, expose net/http/pprof on port 6060"`
OpenPprofWeb bool `env:"ORLY_OPEN_PPROF_WEB" default:"false" usage:"if true, automatically open the pprof web viewer when profiling is enabled"`
IPWhitelist []string `env:"ORLY_IP_WHITELIST" usage:"comma-separated list of IP addresses to allow access from, matches on prefixes to allow private subnets, eg 10.0.0 = 10.0.0.0/8"`
Admins []string `env:"ORLY_ADMINS" usage:"comma-separated list of admin npubs"`
Owners []string `env:"ORLY_OWNERS" usage:"comma-separated list of owner npubs, who have full control of the relay for wipe and restart and other functions"`
ACLMode string `env:"ORLY_ACL_MODE" usage:"ACL mode: follows,none" default:"none"`
SpiderMode string `env:"ORLY_SPIDER_MODE" usage:"spider mode: none,follow" default:"none"`
SpiderFrequency time.Duration `env:"ORLY_SPIDER_FREQUENCY" usage:"spider frequency in seconds" default:"1h"`

// Web UI and dev mode settings
WebDisableEmbedded bool `env:"ORLY_WEB_DISABLE" default:"false" usage:"disable serving the embedded web UI; useful for hot-reload during development"`
WebDevProxyURL string `env:"ORLY_WEB_DEV_PROXY_URL" usage:"when ORLY_WEB_DISABLE is true, reverse-proxy non-API paths to this dev server URL (e.g. http://localhost:5173)"`
}

// New creates and initializes a new configuration object for the relay

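As a quick illustration of how the settings above are consumed, here is a minimal, hypothetical launch script that sets only variables defined in the struct; the npub value is a placeholder and the defaults shown match the struct tags, not any additional documentation.

```bash
#!/bin/bash
# Sketch: configure the relay purely via environment variables and run it
# from the repository root. The admin npub below is a placeholder.
export ORLY_LISTEN=0.0.0.0        # network listen address (default)
export ORLY_PORT=3334             # port to listen on (default)
export ORLY_LOG_LEVEL=debug       # fatal error warn info debug trace
export ORLY_ACL_MODE=follows      # ACL mode: follows or none
export ORLY_ADMINS=npub1exampleadminkeyplaceholderxxxxxxxxxxxxxxxxxxxxxxxxxxxx
export ORLY_WEB_DISABLE=false     # serve the embedded web UI

go run .
```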
@@ -103,6 +103,20 @@ func (l *Listener) HandleEvent(msg []byte) (err error) {
// user has write access or better, continue
// log.D.F("user has %s access", accessLevel)
}
// check for protected tag (NIP-70)
protectedTag := env.E.Tags.GetFirst([]byte("-"))
if protectedTag != nil && acl.Registry.Active.Load() != "none" {
// check that the pubkey of the event matches the authed pubkey
if !utils.FastEqual(l.authedPubkey.Load(), env.E.Pubkey) {
if err = Ok.Blocked(
l, env,
"protected tag may only be published by user authed to the same pubkey",
); chk.E(err) {
return
}
return
}
}
// if the event is a delete, process the delete
if env.E.Kind == kind.EventDeletion.K {
if err = l.HandleDelete(env); err != nil {

@@ -1,9 +1,9 @@
package app

import (
"fmt"

"lol.mleku.dev/chk"
"lol.mleku.dev/errorf"
"lol.mleku.dev/log"
"next.orly.dev/pkg/encoders/envelopes"
"next.orly.dev/pkg/encoders/envelopes/authenvelope"
"next.orly.dev/pkg/encoders/envelopes/closeenvelope"
@@ -13,7 +13,7 @@ import (
)

func (l *Listener) HandleMessage(msg []byte, remote string) {
log.D.F("%s received message:\n%s", remote, msg)
// log.D.F("%s received message:\n%s", remote, msg)
var err error
var t string
var rem []byte
@@ -32,7 +32,7 @@ func (l *Listener) HandleMessage(msg []byte, remote string) {
// log.D.F("authenvelope: %s %s", remote, rem)
err = l.HandleAuth(rem)
default:
err = errorf.E("unknown envelope type %s\n%s", t, rem)
err = fmt.Errorf("unknown envelope type %s\n%s", t, rem)
}
}
if err != nil {
@@ -43,7 +43,7 @@ func (l *Listener) HandleMessage(msg []byte, remote string) {
// )
// },
// )
if err = noticeenvelope.NewFrom(err.Error()).Write(l); chk.E(err) {
if err = noticeenvelope.NewFrom(err.Error()).Write(l); err != nil {
return
}
}

@@ -9,7 +9,7 @@ import (
"github.com/dgraph-io/badger/v4"
"lol.mleku.dev/chk"
"lol.mleku.dev/log"
acl "next.orly.dev/pkg/acl"
"next.orly.dev/pkg/acl"
"next.orly.dev/pkg/encoders/envelopes/authenvelope"
"next.orly.dev/pkg/encoders/envelopes/closedenvelope"
"next.orly.dev/pkg/encoders/envelopes/eoseenvelope"
@@ -22,21 +22,21 @@ import (
"next.orly.dev/pkg/encoders/kind"
"next.orly.dev/pkg/encoders/reason"
"next.orly.dev/pkg/encoders/tag"
utils "next.orly.dev/pkg/utils"
"next.orly.dev/pkg/utils"
"next.orly.dev/pkg/utils/normalize"
"next.orly.dev/pkg/utils/pointers"
)

func (l *Listener) HandleReq(msg []byte) (err error) {
log.T.F("HandleReq: START processing from %s\n%s\n", l.remote, msg)
var rem []byte
// log.T.F("HandleReq: START processing from %s\n%s\n", l.remote, msg)
// var rem []byte
env := reqenvelope.New()
if rem, err = env.Unmarshal(msg); chk.E(err) {
if _, err = env.Unmarshal(msg); chk.E(err) {
return normalize.Error.Errorf(err.Error())
}
if len(rem) > 0 {
log.I.F("REQ extra bytes: '%s'", rem)
}
// if len(rem) > 0 {
// log.I.F("REQ extra bytes: '%s'", rem)
// }
// send a challenge to the client to auth if an ACL is active
if acl.Registry.Active.Load() != "none" {
if err = authenvelope.NewChallengeWith(l.challenge.Load()).
@@ -57,59 +57,59 @@ func (l *Listener) HandleReq(msg []byte) (err error) {
return
default:
// user has read access or better, continue
log.D.F("user has %s access", accessLevel)
// log.D.F("user has %s access", accessLevel)
}
var events event.S
for _, f := range *env.Filters {
idsLen := 0
kindsLen := 0
authorsLen := 0
tagsLen := 0
if f != nil {
if f.Ids != nil {
idsLen = f.Ids.Len()
}
if f.Kinds != nil {
kindsLen = f.Kinds.Len()
}
if f.Authors != nil {
authorsLen = f.Authors.Len()
}
if f.Tags != nil {
tagsLen = f.Tags.Len()
}
}
log.T.F(
"REQ %s: filter summary ids=%d kinds=%d authors=%d tags=%d",
env.Subscription, idsLen, kindsLen, authorsLen, tagsLen,
)
// idsLen := 0
// kindsLen := 0
// authorsLen := 0
// tagsLen := 0
// if f != nil {
// if f.Ids != nil {
// idsLen = f.Ids.Len()
// }
// if f.Kinds != nil {
// kindsLen = f.Kinds.Len()
// }
// if f.Authors != nil {
// authorsLen = f.Authors.Len()
// }
// if f.Tags != nil {
// tagsLen = f.Tags.Len()
// }
// }
// log.T.F(
// "REQ %s: filter summary ids=%d kinds=%d authors=%d tags=%d",
// env.Subscription, idsLen, kindsLen, authorsLen, tagsLen,
// )
if f != nil && f.Authors != nil && f.Authors.Len() > 0 {
var authors []string
for _, a := range f.Authors.T {
authors = append(authors, hex.Enc(a))
}
log.T.F("REQ %s: authors=%v", env.Subscription, authors)
// log.T.F("REQ %s: authors=%v", env.Subscription, authors)
}
if f != nil && f.Kinds != nil && f.Kinds.Len() > 0 {
log.T.F("REQ %s: kinds=%v", env.Subscription, f.Kinds.ToUint16())
}
if f != nil && f.Ids != nil && f.Ids.Len() > 0 {
var ids []string
for _, id := range f.Ids.T {
ids = append(ids, hex.Enc(id))
}
var lim any
if pointers.Present(f.Limit) {
lim = *f.Limit
} else {
lim = nil
}
log.T.F(
"REQ %s: ids filter count=%d ids=%v limit=%v", env.Subscription,
f.Ids.Len(), ids, lim,
)
}
if pointers.Present(f.Limit) {
// if f != nil && f.Kinds != nil && f.Kinds.Len() > 0 {
// log.T.F("REQ %s: kinds=%v", env.Subscription, f.Kinds.ToUint16())
// }
// if f != nil && f.Ids != nil && f.Ids.Len() > 0 {
// var ids []string
// for _, id := range f.Ids.T {
// ids = append(ids, hex.Enc(id))
// }
// // var lim any
// // if pointers.Present(f.Limit) {
// // lim = *f.Limit
// // } else {
// // lim = nil
// // }
// // log.T.F(
// // "REQ %s: ids filter count=%d ids=%v limit=%v", env.Subscription,
// // f.Ids.Len(), ids, lim,
// // )
// }
if f != nil && pointers.Present(f.Limit) {
if *f.Limit == 0 {
continue
}
@@ -119,15 +119,15 @@ func (l *Listener) HandleReq(msg []byte) (err error) {
context.Background(), 30*time.Second,
)
defer cancel()
log.T.F(
"HandleReq: About to QueryEvents for %s, main context done: %v",
l.remote, l.ctx.Err() != nil,
)
// log.T.F(
// "HandleReq: About to QueryEvents for %s, main context done: %v",
// l.remote, l.ctx.Err() != nil,
// )
if events, err = l.QueryEvents(queryCtx, f); chk.E(err) {
if errors.Is(err, badger.ErrDBClosed) {
return
}
log.T.F("HandleReq: QueryEvents error for %s: %v", l.remote, err)
// log.T.F("HandleReq: QueryEvents error for %s: %v", l.remote, err)
err = nil
}
defer func() {
@@ -135,23 +135,23 @@ func (l *Listener) HandleReq(msg []byte) (err error) {
ev.Free()
}
}()
log.T.F(
"HandleReq: QueryEvents completed for %s, found %d events",
l.remote, len(events),
)
// log.T.F(
// "HandleReq: QueryEvents completed for %s, found %d events",
// l.remote, len(events),
// )
}
var tmp event.S
privCheck:
for _, ev := range events {
if kind.IsPrivileged(ev.Kind) &&
accessLevel != "admin" { // admins can see all events
log.T.C(
func() string {
return fmt.Sprintf(
"checking privileged event %0x", ev.ID,
)
},
)
// log.T.C(
// func() string {
// return fmt.Sprintf(
// "checking privileged event %0x", ev.ID,
// )
// },
// )
pk := l.authedPubkey.Load()
if pk == nil {
continue
@@ -175,26 +175,26 @@ privCheck:
continue
}
if utils.FastEqual(pt, pk) {
log.T.C(
func() string {
return fmt.Sprintf(
"privileged event %s is for logged in pubkey %0x",
ev.ID, pk,
)
},
)
// log.T.C(
// func() string {
// return fmt.Sprintf(
// "privileged event %s is for logged in pubkey %0x",
// ev.ID, pk,
// )
// },
// )
tmp = append(tmp, ev)
continue privCheck
}
}
log.T.C(
func() string {
return fmt.Sprintf(
"privileged event %s does not contain the logged in pubkey %0x",
ev.ID, pk,
)
},
)
// log.T.C(
// func() string {
// return fmt.Sprintf(
// "privileged event %s does not contain the logged in pubkey %0x",
// ev.ID, pk,
// )
// },
// )
} else {
tmp = append(tmp, ev)
}
@@ -202,19 +202,19 @@ privCheck:
events = tmp
seen := make(map[string]struct{})
for _, ev := range events {
log.D.C(
func() string {
return fmt.Sprintf(
"REQ %s: sending EVENT id=%s kind=%d", env.Subscription,
hex.Enc(ev.ID), ev.Kind,
)
},
)
log.T.C(
func() string {
return fmt.Sprintf("event:\n%s\n", ev.Serialize())
},
)
// log.D.C(
// func() string {
// return fmt.Sprintf(
// "REQ %s: sending EVENT id=%s kind=%d", env.Subscription,
// hex.Enc(ev.ID), ev.Kind,
// )
// },
// )
// log.T.C(
// func() string {
// return fmt.Sprintf("event:\n%s\n", ev.Serialize())
// },
// )
var res *eventenvelope.Result
if res, err = eventenvelope.NewResultWith(
env.Subscription, ev,
@@ -229,7 +229,7 @@ privCheck:
}
// write the EOSE to signal to the client that all events found have been
// sent.
log.T.F("sending EOSE to %s", l.remote)
// log.T.F("sending EOSE to %s", l.remote)
if err = eoseenvelope.NewFrom(env.Subscription).
Write(l); chk.E(err) {
return
@@ -237,10 +237,10 @@ privCheck:
// if the query was for just Ids, we know there can't be any more results,
// so cancel the subscription.
cancel := true
log.T.F(
"REQ %s: computing cancel/subscription; events_sent=%d",
env.Subscription, len(events),
)
// log.T.F(
// "REQ %s: computing cancel/subscription; events_sent=%d",
// env.Subscription, len(events),
// )
var subbedFilters filter.S
for _, f := range *env.Filters {
if f.Ids.Len() < 1 {
@@ -255,10 +255,10 @@ privCheck:
}
notFounds = append(notFounds, id)
}
log.T.F(
"REQ %s: ids outstanding=%d of %d", env.Subscription,
len(notFounds), f.Ids.Len(),
)
// log.T.F(
// "REQ %s: ids outstanding=%d of %d", env.Subscription,
// len(notFounds), f.Ids.Len(),
// )
// if all were found, don't add to subbedFilters
if len(notFounds) == 0 {
continue
@@ -295,6 +295,6 @@ privCheck:
return
}
}
log.T.F("HandleReq: COMPLETED processing from %s", l.remote)
// log.T.F("HandleReq: COMPLETED processing from %s", l.remote)
return
}

@@ -97,7 +97,7 @@ whitelist:
}
var typ websocket.MessageType
var msg []byte
log.T.F("waiting for message from %s", remote)
// log.T.F("waiting for message from %s", remote)

// Create a read context with timeout to prevent indefinite blocking
readCtx, readCancel := context.WithTimeout(ctx, DefaultReadTimeout)
@@ -152,7 +152,7 @@ whitelist:
writeCancel()
continue
}
log.T.F("received message from %s: %s", remote, string(msg))
// log.T.F("received message from %s: %s", remote, string(msg))
go listener.HandleMessage(msg, remote)
}
}

@@ -45,6 +45,8 @@ func Run(
publishers: publish.New(NewPublisher(ctx)),
Admins: adminKeys,
}
// Initialize the user interface
l.UserInterface()
addr := fmt.Sprintf("%s:%d", cfg.Listen, cfg.Port)
log.I.F("starting listener on http://%s", addr)
go func() {

app/server.go: 445 lines changed
@@ -2,13 +2,24 @@ package app

import (
"context"
"encoding/json"
"io"
"log"
"net/http"
"net/http/httputil"
"net/url"
"strconv"
"strings"
"sync"
"time"

"lol.mleku.dev/chk"
"next.orly.dev/app/config"
"next.orly.dev/pkg/acl"
"next.orly.dev/pkg/database"
"next.orly.dev/pkg/encoders/event"
"next.orly.dev/pkg/encoders/hex"
"next.orly.dev/pkg/protocol/auth"
"next.orly.dev/pkg/protocol/publish"
)

@@ -20,26 +31,53 @@ type Server struct {
publishers *publish.S
Admins [][]byte
*database.D

// optional reverse proxy for dev web server
devProxy *httputil.ReverseProxy

// Challenge storage for HTTP UI authentication
challengeMutex sync.RWMutex
challenges map[string][]byte
}

func (s *Server) ServeHTTP(w http.ResponseWriter, r *http.Request) {
// log.T.C(
// func() string {
// return fmt.Sprintf("path %v header %v", r.URL, r.Header)
// },
// )
if r.Header.Get("Upgrade") == "websocket" {
s.HandleWebsocket(w, r)
} else if r.Header.Get("Accept") == "application/nostr+json" {
s.HandleRelayInfo(w, r)
} else {
if s.mux == nil {
http.Error(w, "Upgrade required", http.StatusUpgradeRequired)
} else {
s.mux.ServeHTTP(w, r)
}
// Set CORS headers for all responses
w.Header().Set("Access-Control-Allow-Origin", "*")
w.Header().Set("Access-Control-Allow-Methods", "GET, POST, OPTIONS")
w.Header().Set(
"Access-Control-Allow-Headers", "Content-Type, Authorization",
)

// Handle preflight OPTIONS requests
if r.Method == "OPTIONS" {
w.WriteHeader(http.StatusOK)
return
}

// If this is a websocket request, only intercept the relay root path.
// This allows other websocket paths (e.g., Vite HMR) to be handled by the dev proxy when enabled.
if r.Header.Get("Upgrade") == "websocket" {
if s.mux != nil && s.Config != nil && s.Config.WebDisableEmbedded && s.Config.WebDevProxyURL != "" && r.URL.Path != "/" {
// forward to mux (which will proxy to dev server)
s.mux.ServeHTTP(w, r)
return
}
s.HandleWebsocket(w, r)
return
}

if r.Header.Get("Accept") == "application/nostr+json" {
s.HandleRelayInfo(w, r)
return
}

if s.mux == nil {
http.Error(w, "Upgrade required", http.StatusUpgradeRequired)
return
}
s.mux.ServeHTTP(w, r)
}

func (s *Server) ServiceURL(req *http.Request) (st string) {
host := req.Header.Get("X-Forwarded-Host")
if host == "" {
@@ -70,3 +108,380 @@ func (s *Server) ServiceURL(req *http.Request) (st string) {
}
return proto + "://" + host
}

// UserInterface sets up a basic Nostr NDK interface that allows users to log into the relay user interface
func (s *Server) UserInterface() {
if s.mux == nil {
s.mux = http.NewServeMux()
}

// If dev proxy is configured, initialize it
if s.Config != nil && s.Config.WebDisableEmbedded && s.Config.WebDevProxyURL != "" {
proxyURL := s.Config.WebDevProxyURL
// Add default scheme if missing to avoid: proxy error: unsupported protocol scheme ""
if !strings.Contains(proxyURL, "://") {
proxyURL = "http://" + proxyURL
}
if target, err := url.Parse(proxyURL); !chk.E(err) {
if target.Scheme == "" || target.Host == "" {
// invalid URL, disable proxy
log.Printf(
"invalid ORLY_WEB_DEV_PROXY_URL: %q — disabling dev proxy\n",
s.Config.WebDevProxyURL,
)
} else {
s.devProxy = httputil.NewSingleHostReverseProxy(target)
// Ensure Host header points to upstream for dev servers that care
origDirector := s.devProxy.Director
s.devProxy.Director = func(req *http.Request) {
origDirector(req)
req.Host = target.Host
}
}
}
}

// Initialize challenge storage if not already done
if s.challenges == nil {
s.challengeMutex.Lock()
s.challenges = make(map[string][]byte)
s.challengeMutex.Unlock()
}

// Serve the main login interface (and static assets) or proxy in dev mode
s.mux.HandleFunc("/", s.handleLoginInterface)

// API endpoints for authentication
s.mux.HandleFunc("/api/auth/challenge", s.handleAuthChallenge)
s.mux.HandleFunc("/api/auth/login", s.handleAuthLogin)
s.mux.HandleFunc("/api/auth/status", s.handleAuthStatus)
s.mux.HandleFunc("/api/auth/logout", s.handleAuthLogout)
s.mux.HandleFunc("/api/permissions/", s.handlePermissions)
// Export endpoints
s.mux.HandleFunc("/api/export", s.handleExport)
s.mux.HandleFunc("/api/export/mine", s.handleExportMine)
// Import endpoint (admin only)
s.mux.HandleFunc("/api/import", s.handleImport)
}

// handleLoginInterface serves the main user interface for login
func (s *Server) handleLoginInterface(w http.ResponseWriter, r *http.Request) {
// In dev mode with proxy configured, forward to dev server
if s.Config != nil && s.Config.WebDisableEmbedded && s.devProxy != nil {
s.devProxy.ServeHTTP(w, r)
return
}
// If embedded UI is disabled but no proxy configured, return a helpful message
if s.Config != nil && s.Config.WebDisableEmbedded {
w.Header().Set("Content-Type", "text/plain; charset=utf-8")
w.WriteHeader(http.StatusNotFound)
w.Write([]byte("Web UI disabled (ORLY_WEB_DISABLE=true). Run the web app in standalone dev mode (e.g., npm run dev) or set ORLY_WEB_DEV_PROXY_URL to proxy through this server."))
return
}
// Default: serve embedded React app
fileServer := http.FileServer(GetReactAppFS())
fileServer.ServeHTTP(w, r)
}

// handleAuthChallenge generates and returns an authentication challenge
func (s *Server) handleAuthChallenge(w http.ResponseWriter, r *http.Request) {
if r.Method != http.MethodGet {
http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
return
}

// Generate a proper challenge using the auth package
challenge := auth.GenerateChallenge()
challengeHex := hex.Enc(challenge)

// Store the challenge using the hex value as the key for easy lookup
s.challengeMutex.Lock()
s.challenges[challengeHex] = challenge
s.challengeMutex.Unlock()

w.Header().Set("Content-Type", "application/json")
w.Write([]byte(`{"challenge": "` + challengeHex + `"}`))
}

// handleAuthLogin processes authentication requests
func (s *Server) handleAuthLogin(w http.ResponseWriter, r *http.Request) {
if r.Method != http.MethodPost {
http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
return
}

w.Header().Set("Content-Type", "application/json")

// Read the request body
body, err := io.ReadAll(r.Body)
if chk.E(err) {
w.Write([]byte(`{"success": false, "error": "Failed to read request body"}`))
return
}

// Parse the signed event
var evt event.E
if err = json.Unmarshal(body, &evt); chk.E(err) {
w.Write([]byte(`{"success": false, "error": "Invalid event format"}`))
return
}

// Extract the challenge from the event to look up the stored challenge
challengeTag := evt.Tags.GetFirst([]byte("challenge"))
if challengeTag == nil {
w.Write([]byte(`{"success": false, "error": "Challenge tag missing from event"}`))
return
}

challengeHex := string(challengeTag.Value())

// Retrieve the stored challenge
s.challengeMutex.RLock()
_, exists := s.challenges[challengeHex]
s.challengeMutex.RUnlock()

if !exists {
w.Write([]byte(`{"success": false, "error": "Invalid or expired challenge"}`))
return
}

// Clean up the used challenge
s.challengeMutex.Lock()
delete(s.challenges, challengeHex)
s.challengeMutex.Unlock()

relayURL := s.ServiceURL(r)

// Validate the authentication event with the correct challenge
// The challenge in the event tag is hex-encoded, so we need to pass the hex string as bytes
ok, err := auth.Validate(&evt, []byte(challengeHex), relayURL)
if chk.E(err) || !ok {
errorMsg := "Authentication validation failed"
if err != nil {
errorMsg = err.Error()
}
w.Write([]byte(`{"success": false, "error": "` + errorMsg + `"}`))
return
}

// Authentication successful: set a simple session cookie with the pubkey
cookie := &http.Cookie{
Name: "orly_auth",
Value: hex.Enc(evt.Pubkey),
Path: "/",
HttpOnly: true,
SameSite: http.SameSiteLaxMode,
MaxAge: 60 * 60 * 24 * 30, // 30 days
}
http.SetCookie(w, cookie)
w.Write([]byte(`{"success": true, "pubkey": "` + hex.Enc(evt.Pubkey) + `", "message": "Authentication successful"}`))
}

// handleAuthStatus returns the current authentication status
func (s *Server) handleAuthStatus(w http.ResponseWriter, r *http.Request) {
if r.Method != http.MethodGet {
http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
return
}

w.Header().Set("Content-Type", "application/json")
// Check for auth cookie
if c, err := r.Cookie("orly_auth"); err == nil && c.Value != "" {
// Validate pubkey format (hex)
if _, err := hex.Dec(c.Value); !chk.E(err) {
w.Write([]byte(`{"authenticated": true, "pubkey": "` + c.Value + `"}`))
return
}
}
w.Write([]byte(`{"authenticated": false}`))
}

// handleAuthLogout clears the auth cookie
func (s *Server) handleAuthLogout(w http.ResponseWriter, r *http.Request) {
if r.Method != http.MethodPost {
http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
return
}
// Expire the cookie
http.SetCookie(
w, &http.Cookie{
Name: "orly_auth",
Value: "",
Path: "/",
MaxAge: -1,
HttpOnly: true,
SameSite: http.SameSiteLaxMode,
},
)
w.Header().Set("Content-Type", "application/json")
w.Write([]byte(`{"success": true}`))
}

// handlePermissions returns the permission level for a given pubkey
func (s *Server) handlePermissions(w http.ResponseWriter, r *http.Request) {
if r.Method != http.MethodGet {
http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
return
}

// Extract pubkey from URL path
pubkeyHex := strings.TrimPrefix(r.URL.Path, "/api/permissions/")
if pubkeyHex == "" || pubkeyHex == "/" {
http.Error(w, "Invalid pubkey", http.StatusBadRequest)
return
}

// Convert hex to binary pubkey
pubkey, err := hex.Dec(pubkeyHex)
if chk.E(err) {
http.Error(w, "Invalid pubkey format", http.StatusBadRequest)
return
}

// Get access level using acl registry
permission := acl.Registry.GetAccessLevel(pubkey, r.RemoteAddr)

// Set content type and write JSON response
w.Header().Set("Content-Type", "application/json")

// Format response as proper JSON
response := struct {
Permission string `json:"permission"`
}{
Permission: permission,
}

// Marshal and write the response
jsonData, err := json.Marshal(response)
if chk.E(err) {
http.Error(
w, "Error generating response", http.StatusInternalServerError,
)
return
}

w.Write(jsonData)
}

// handleExport streams all events as JSONL (NDJSON). Admins only.
func (s *Server) handleExport(w http.ResponseWriter, r *http.Request) {
if r.Method != http.MethodGet {
http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
return
}

// Require auth cookie
c, err := r.Cookie("orly_auth")
if err != nil || c.Value == "" {
http.Error(w, "Not authenticated", http.StatusUnauthorized)
return
}
requesterPubHex := c.Value
requesterPub, err := hex.Dec(requesterPubHex)
if chk.E(err) {
http.Error(w, "Invalid auth cookie", http.StatusUnauthorized)
return
}
// Check permissions
if acl.Registry.GetAccessLevel(requesterPub, r.RemoteAddr) != "admin" {
http.Error(w, "Forbidden", http.StatusForbidden)
return
}

// Optional filtering by pubkey(s)
var pks [][]byte
q := r.URL.Query()
for _, pkHex := range q["pubkey"] {
if pkHex == "" {
continue
}
if pk, err := hex.Dec(pkHex); !chk.E(err) {
pks = append(pks, pk)
}
}

w.Header().Set("Content-Type", "application/x-ndjson")
filename := "events-" + time.Now().UTC().Format("20060102-150405Z") + ".jsonl"
w.Header().Set("Content-Disposition", "attachment; filename=\""+filename+"\"")

// Stream export
s.D.Export(s.Ctx, w, pks...)
}


// handleExportMine streams only the authenticated user's events as JSONL (NDJSON).
func (s *Server) handleExportMine(w http.ResponseWriter, r *http.Request) {
if r.Method != http.MethodGet {
http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
return
}

// Require auth cookie
c, err := r.Cookie("orly_auth")
if err != nil || c.Value == "" {
http.Error(w, "Not authenticated", http.StatusUnauthorized)
return
}
pubkey, err := hex.Dec(c.Value)
if chk.E(err) {
http.Error(w, "Invalid auth cookie", http.StatusUnauthorized)
return
}

w.Header().Set("Content-Type", "application/x-ndjson")
filename := "my-events-" + time.Now().UTC().Format("20060102-150405Z") + ".jsonl"
w.Header().Set("Content-Disposition", "attachment; filename=\""+filename+"\"")

// Stream export for this user's pubkey only
s.D.Export(s.Ctx, w, pubkey)
}

// handleImport receives a JSONL/NDJSON file or body and enqueues an async import. Admins only.
func (s *Server) handleImport(w http.ResponseWriter, r *http.Request) {
if r.Method != http.MethodPost {
http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
return
}

// Require auth cookie
c, err := r.Cookie("orly_auth")
if err != nil || c.Value == "" {
http.Error(w, "Not authenticated", http.StatusUnauthorized)
return
}
requesterPub, err := hex.Dec(c.Value)
if chk.E(err) {
http.Error(w, "Invalid auth cookie", http.StatusUnauthorized)
return
}
// Admins only
if acl.Registry.GetAccessLevel(requesterPub, r.RemoteAddr) != "admin" {
http.Error(w, "Forbidden", http.StatusForbidden)
return
}

ct := r.Header.Get("Content-Type")
if strings.HasPrefix(ct, "multipart/form-data") {
if err := r.ParseMultipartForm(32 << 20); chk.E(err) { // 32MB memory, rest to temp files
http.Error(w, "Failed to parse form", http.StatusBadRequest)
return
}
file, _, err := r.FormFile("file")
if chk.E(err) {
http.Error(w, "Missing file", http.StatusBadRequest)
return
}
defer file.Close()
s.D.Import(file)
} else {
if r.Body == nil {
http.Error(w, "Empty request body", http.StatusBadRequest)
return
}
s.D.Import(r.Body)
}

w.Header().Set("Content-Type", "application/json")
w.WriteHeader(http.StatusAccepted)
w.Write([]byte(`{"success": true, "message": "Import started"}`))
}

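The new HTTP endpoints above can be exercised with plain curl. The sketch below is illustrative only: it assumes the relay is on the default port 3334, that the signed authentication event is produced separately by a Nostr signer, and the file names are placeholders.

```bash
# 1. Fetch a login challenge; the handler responds with {"challenge": "<hex>"}.
curl -s http://localhost:3334/api/auth/challenge

# 2. Sign a Nostr event carrying the challenge in a ["challenge", "<hex>"] tag
#    with your usual signer, save it as signed-auth-event.json, then log in.
#    On success the server sets an orly_auth session cookie.
curl -s -c cookies.txt -X POST \
     -H 'Content-Type: application/json' \
     --data @signed-auth-event.json \
     http://localhost:3334/api/auth/login

# 3. Check the session, then (as an admin) stream all events as NDJSON.
curl -s -b cookies.txt http://localhost:3334/api/auth/status
curl -s -b cookies.txt -o events.jsonl http://localhost:3334/api/export

# 4. Re-import a previously exported file (admin only, multipart field "file").
curl -s -b cookies.txt -F file=@events.jsonl http://localhost:3334/api/import
```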
app/web.go (new file): 19 lines
@@ -0,0 +1,19 @@
package app

import (
"embed"
"io/fs"
"net/http"
)

//go:embed web/dist
var reactAppFS embed.FS

// GetReactAppFS returns a http.FileSystem from the embedded React app
func GetReactAppFS() http.FileSystem {
webDist, err := fs.Sub(reactAppFS, "web/dist")
if err != nil {
panic("Failed to load embedded web app: " + err.Error())
}
return http.FS(webDist)
}
app/web/.gitignore (vendored, new file): 30 lines
@@ -0,0 +1,30 @@
# Dependencies
node_modules
.pnp
.pnp.js

# Bun
.bunfig.toml
bun.lockb

# Build directories
build

# Cache and logs
.cache
.temp
.log
*.log

# Environment variables
.env
.env.local
.env.development.local
.env.test.local
.env.production.local

# Editor directories and files
.idea
.vscode
*.swp
*.swo
app/web/README.md (new file): 89 lines
@@ -0,0 +1,89 @@
# Orly Web Application

This is a React web application that uses Bun for building and bundling, and is automatically embedded into the Go binary when built.

## Prerequisites

- [Bun](https://bun.sh/) - JavaScript runtime and toolkit
- Go 1.16+ (for embedding functionality)

## Development

There are two ways to develop the web app:

1) Standalone (recommended for hot reload)
- Start the Go relay with the embedded web UI disabled so the React app can run on its own dev server with HMR.
- Configure the relay via environment variables:

```bash
# In another shell at repo root
export ORLY_WEB_DISABLE=true
# Optional: if you want same-origin URLs, you can set a proxy target and access the relay on the same port
# export ORLY_WEB_DEV_PROXY_URL=http://localhost:5173

# Start the relay as usual
go run .
```

- Then start the React dev server:

```bash
cd app/web
bun install
bun dev
```

When ORLY_WEB_DISABLE=true is set, the Go server still serves the API and websocket endpoints and sends permissive CORS headers, so the dev server can access them cross-origin. If ORLY_WEB_DEV_PROXY_URL is set, the Go server will reverse-proxy non-/api paths to the dev server so you can use the same origin.

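For example, you can confirm the permissive CORS headers from another shell; this quick check assumes the relay is using the default ORLY_PORT of 3334.

```bash
# Ask the relay for auth status as if the request came from the Vite dev origin;
# the response headers should include Access-Control-Allow-Origin: *.
curl -i -H 'Origin: http://localhost:5173' http://localhost:3334/api/auth/status
```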
2) Embedded (no hot reload)
- Build the web app and run the Go server with defaults:

```bash
cd app/web
bun install
bun run build
cd ../../
go run .
```

## Building

The React application needs to be built before compiling the Go binary to ensure that the embedded files are available:

```bash
# Build the React application
cd app/web
bun install
bun run build

# Build the Go binary from project root
cd ../../
go build
```

## How it works

1. The React application is built to the `app/web/dist` directory
2. The Go embed directive in `app/web.go` embeds these files into the binary
3. When the server runs, it serves the embedded React app at the root path

## Build Automation

You can create a shell script to automate the build process:

```bash
#!/bin/bash
# build.sh
echo "Building React app..."
cd app/web
bun install
bun run build

echo "Building Go binary..."
cd ../../
go build

echo "Build complete!"
```

Make it executable with `chmod +x build.sh` and run with `./build.sh`.
app/web/bun.lock (new file): 36 lines
@@ -0,0 +1,36 @@
|
||||
{
|
||||
"lockfileVersion": 1,
|
||||
"workspaces": {
|
||||
"": {
|
||||
"name": "orly-web",
|
||||
"dependencies": {
|
||||
"react": "^18.2.0",
|
||||
"react-dom": "^18.2.0",
|
||||
},
|
||||
"devDependencies": {
|
||||
"bun-types": "latest",
|
||||
},
|
||||
},
|
||||
},
|
||||
"packages": {
|
||||
"@types/node": ["@types/node@24.5.2", "", { "dependencies": { "undici-types": "~7.12.0" } }, "sha512-FYxk1I7wPv3K2XBaoyH2cTnocQEu8AOZ60hPbsyukMPLv5/5qr7V1i8PLHdl6Zf87I+xZXFvPCXYjiTFq+YSDQ=="],
|
||||
|
||||
"@types/react": ["@types/react@19.1.13", "", { "dependencies": { "csstype": "^3.0.2" } }, "sha512-hHkbU/eoO3EG5/MZkuFSKmYqPbSVk5byPFa3e7y/8TybHiLMACgI8seVYlicwk7H5K/rI2px9xrQp/C+AUDTiQ=="],
|
||||
|
||||
"bun-types": ["bun-types@1.2.22", "", { "dependencies": { "@types/node": "*" }, "peerDependencies": { "@types/react": "^19" } }, "sha512-hwaAu8tct/Zn6Zft4U9BsZcXkYomzpHJX28ofvx7k0Zz2HNz54n1n+tDgxoWFGB4PcFvJXJQloPhaV2eP3Q6EA=="],
|
||||
|
||||
"csstype": ["csstype@3.1.3", "", {}, "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw=="],
|
||||
|
||||
"js-tokens": ["js-tokens@4.0.0", "", {}, "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ=="],
|
||||
|
||||
"loose-envify": ["loose-envify@1.4.0", "", { "dependencies": { "js-tokens": "^3.0.0 || ^4.0.0" }, "bin": { "loose-envify": "cli.js" } }, "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q=="],
|
||||
|
||||
"react": ["react@18.3.1", "", { "dependencies": { "loose-envify": "^1.1.0" } }, "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ=="],
|
||||
|
||||
"react-dom": ["react-dom@18.3.1", "", { "dependencies": { "loose-envify": "^1.1.0", "scheduler": "^0.23.2" }, "peerDependencies": { "react": "^18.3.1" } }, "sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw=="],
|
||||
|
||||
"scheduler": ["scheduler@0.23.2", "", { "dependencies": { "loose-envify": "^1.1.0" } }, "sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ=="],
|
||||
|
||||
"undici-types": ["undici-types@7.12.0", "", {}, "sha512-goOacqME2GYyOZZfb5Lgtu+1IDmAlAEu5xnD3+xTzS10hT0vzpf0SPjkXwAw9Jm+4n/mQGDP3LO8CPbYROeBfQ=="],
|
||||
}
|
||||
}
|
||||
app/web/dist/index-zhtd763e.css (vendored, new file): 1 line
@@ -0,0 +1 @@
|
||||
*,:before,:after{box-sizing:border-box;border:0 solid #e5e7eb}html,body,#root{height:100%}html{-webkit-text-size-adjust:100%;tab-size:4;font-family:ui-sans-serif,system-ui,-apple-system,BlinkMacSystemFont,Segoe UI,Roboto,Noto Sans,Ubuntu,Cantarell,Helvetica Neue,Helvetica,Arial,"\"Apple Color Emoji\"","\"Segoe UI Emoji\"";line-height:1.5}body{margin:0}button,input{font:inherit;color:inherit}img{display:block;max-width:100%;height:auto}.sticky{position:sticky}.relative{position:relative}.absolute{position:absolute}.top-0{top:0}.left-0{left:0}.inset-0{inset:0}.z-50{z-index:50}.z-10{z-index:10}.block{display:block}.flex{display:flex}.items-center{align-items: center}.justify-start{justify-content:flex-start}.justify-center{justify-content:center}.justify-end{justify-content:flex-end}.flex-grow{flex-grow:1}.shrink-0{flex-shrink:0}.overflow-hidden{overflow:hidden}.w-full{width:100%}.w-auto{width:auto}.w-16{width:4rem}.h-full{height:100%}.h-16{height:4rem}.aspect-square{aspect-ratio:1}.max-w-3xl{max-width:48rem}.p-0{padding:0}.p-2{padding:.5rem}.p-3{padding:.75rem}.p-6{padding:1.5rem}.px-2{padding-left:.5rem;padding-right:.5rem}.mr-0{margin-right:0}.mr-2{margin-right:.5rem}.mt-2{margin-top:.5rem}.mt-5{margin-top:1.25rem}.mb-1{margin-bottom:.25rem}.mb-2{margin-bottom:.5rem}.mb-4{margin-bottom:1rem}.mb-5{margin-bottom:1.25rem}.mx-auto{margin-left:auto;margin-right:auto}.rounded{border-radius:.25rem}.rounded-full{border-radius:9999px}.border-0{border-width:0}.border-2{border-width:2px}.border-white{border-color:#fff}.border{border-width:1px}.border-gray-300{border-color:#d1d5db}.bg-white{background-color:#fff}.bg-gray-100{background-color:#f3f4f6}.bg-blue-600{background-color:#2563eb}.hover\:bg-blue-700:hover{background-color:#1d4ed8}.bg-red-600{background-color:#dc2626}.hover\:bg-red-700:hover{background-color:#b91c1c}.bg-cyan-100{background-color:#cffafe}.bg-green-100{background-color:#d1fae5}.bg-red-100{background-color:#fee2e2}.bg-cover{background-size:cover}.bg-center{background-position:50%}.bg-transparent{background-color:#0000}.text-left{text-align:left}.text-white{color:#fff}.text-gray-500{color:#6b7280}.hover\:text-gray-800:hover{color:#1f2937}.text-green-800{color:#065f46}.text-red-800{color:#991b1b}.text-cyan-800{color:#155e75}.text-base{font-size:1rem;line-height:1.5rem}.text-lg{font-size:1.125rem;line-height:1.75rem}.text-2xl{font-size:1.5rem;line-height:2rem}.font-bold{font-weight:700}.opacity-70{opacity:.7}.shadow{--tw-shadow:0 1px 3px 0 #0000001a,0 1px 2px -1px #0000001a;box-shadow:var(--tw-shadow)}.cursor-pointer{cursor:pointer}.box-border{box-sizing:border-box}.hover\:bg-transparent:hover{background-color:#0000}body{margin:0;padding:0;font-family:Arial,sans-serif}.container{background:#f9f9f9;border-radius:8px;margin-top:20px;padding:30px}.form-group{margin-bottom:20px}label{display:block;margin-bottom:5px;font-weight:700}input,textarea{border:1px solid #ddd;border-radius:4px;width:100%;padding:10px}button{color:#fff;cursor:pointer;background:#007cba;border:none;border-radius:4px;padding:12px 20px}button:hover{background:#005a87}.danger-button{background:#dc3545}.danger-button:hover{background:#c82333}.status{border-radius:4px;margin-top:20px;margin-bottom:20px;padding:10px}.success{color:#155724;background:#d4edda}.error{color:#721c24;background:#f8d7da}.info{color:#0c5460;background:#d1ecf1}.header-panel{position:sticky;z-index:1000;display:flex;overflow:hidden;background-color:#f8f9fa;background-position:50%;background-size:cover;align-items: 
center;width:100%;height:60px;top:0;left:0;box-shadow:0 2px 4px #0000001a}.header-content{display:flex;box-sizing:border-box;align-items: center;width:100%;height:100%;margin:0 auto;padding:0 0 0 12px}.header-left{display:flex;justify-content:flex-start;align-items: center;height:100%}.header-center{display:flex;position:relative;overflow:hidden;flex-grow:1;justify-content:flex-start;align-items: center}.header-right{display:flex;justify-content:flex-end;align-items: center;height:100%}.header-logo{aspect-ratio:1;object-fit:cover;border-radius:0;flex-shrink:0;width:auto;height:100%}.user-avatar{object-fit:cover;border:2px solid #fff;border-radius:50%;width:40px;height:40px;margin-right:10px;box-shadow:0 1px 3px #0003}.user-profile{display:flex;position:relative;z-index:1;align-items: center}.user-info{text-align:left;font-size:1.2em;font-weight:700}.user-name{display:block;font-size:1em;font-weight:700}.profile-banner{position:absolute;z-index:-1;opacity:.7;width:100%;height:100%;top:0;left:0}.logout-button{color:#6c757d;cursor:pointer;display:flex;background:0 0;border:none;flex-shrink:0;justify-content:center;align-items: center;width:48px;height:100%;margin-left:10px;margin-right:0;padding:0;font-size:20px}.logout-button:hover{color:#343a40;background:0 0}
|
||||
app/web/dist/index-zqddcpy5.js (vendored, new file): 160 lines
File diff suppressed because one or more lines are too long
app/web/dist/index.html (vendored, new file): 13 lines
@@ -0,0 +1,13 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Nostr Relay</title>

<link rel="stylesheet" crossorigin href="./index-zhtd763e.css"><script type="module" crossorigin src="./index-zqddcpy5.js"></script></head>
<body class="bg-white">
<div id="root"></div>

</body>
</html>
app/web/dist/tailwind.min.css (vendored, new file): 82 lines
@@ -0,0 +1,82 @@
|
||||
/*
|
||||
Local Tailwind CSS (minimal subset for this UI)
|
||||
Note: This file includes just the utilities used by the app to keep size small.
|
||||
You can replace this with a full Tailwind build if desired.
|
||||
*/
|
||||
|
||||
/* Preflight-like resets (very minimal) */
|
||||
*,::before,::after{box-sizing:border-box;border-width:0;border-style:solid;border-color:#e5e7eb}
|
||||
html,body,#root{height:100%}
|
||||
html{line-height:1.5;-webkit-text-size-adjust:100%;tab-size:4;font-family:ui-sans-serif,system-ui,-apple-system,Segoe UI,Roboto,Helvetica,Arial,Noto Sans,\"Apple Color Emoji\",\"Segoe UI Emoji\"}
|
||||
body{margin:0}
|
||||
button,input{font:inherit;color:inherit}
|
||||
img{display:block;max-width:100%;height:auto}
|
||||
|
||||
/* Layout */
|
||||
.sticky{position:sticky}.relative{position:relative}.absolute{position:absolute}
|
||||
.top-0{top:0}.left-0{left:0}.inset-0{top:0;right:0;bottom:0;left:0}
|
||||
.z-50{z-index:50}.z-10{z-index:10}
|
||||
.block{display:block}.flex{display:flex}
|
||||
.items-center{align-items:center}.justify-start{justify-content:flex-start}.justify-center{justify-content:center}.justify-end{justify-content:flex-end}
|
||||
.flex-grow{flex-grow:1}.shrink-0{flex-shrink:0}
|
||||
.overflow-hidden{overflow:hidden}
|
||||
|
||||
/* Sizing */
|
||||
.w-full{width:100%}.w-auto{width:auto}.w-16{width:4rem}
|
||||
.h-full{height:100%}.h-16{height:4rem}
|
||||
.aspect-square{aspect-ratio:1/1}
|
||||
.max-w-3xl{max-width:48rem}
|
||||
|
||||
/* Spacing */
|
||||
.p-0{padding:0}.p-2{padding:.5rem}.p-3{padding:.75rem}.p-6{padding:1.5rem}
|
||||
.px-2{padding-left:.5rem;padding-right:.5rem}
|
||||
.mr-0{margin-right:0}.mr-2{margin-right:.5rem}
|
||||
.mt-2{margin-top:.5rem}.mt-5{margin-top:1.25rem}
|
||||
.mb-1{margin-bottom:.25rem}.mb-2{margin-bottom:.5rem}.mb-4{margin-bottom:1rem}.mb-5{margin-bottom:1.25rem}
|
||||
.mx-auto{margin-left:auto;margin-right:auto}
|
||||
|
||||
/* Borders & Radius */
|
||||
.rounded{border-radius:.25rem}.rounded-full{border-radius:9999px}
|
||||
.border-0{border-width:0}.border-2{border-width:2px}
|
||||
.border-white{border-color:#fff}
|
||||
.border{border-width:1px}.border-gray-300{border-color:#d1d5db}
|
||||
|
||||
/* Colors / Backgrounds */
|
||||
.bg-white{background-color:#fff}
|
||||
.bg-gray-100{background-color:#f3f4f6}
|
||||
.bg-blue-600{background-color:#2563eb}.hover\:bg-blue-700:hover{background-color:#1d4ed8}
|
||||
.bg-red-600{background-color:#dc2626}.hover\:bg-red-700:hover{background-color:#b91c1c}
|
||||
.bg-cyan-100{background-color:#cffafe}
|
||||
.bg-green-100{background-color:#d1fae5}
|
||||
.bg-red-100{background-color:#fee2e2}
|
||||
.bg-cover{background-size:cover}.bg-center{background-position:center}
|
||||
.bg-transparent{background-color:transparent}
|
||||
|
||||
/* Text */
|
||||
.text-left{text-align:left}
|
||||
.text-white{color:#fff}
|
||||
.text-gray-500{color:#6b7280}.hover\:text-gray-800:hover{color:#1f2937}
|
||||
.text-green-800{color:#065f46}
|
||||
.text-red-800{color:#991b1b}
|
||||
.text-cyan-800{color:#155e75}
|
||||
.text-base{font-size:1rem;line-height:1.5rem}
|
||||
.text-lg{font-size:1.125rem;line-height:1.75rem}
|
||||
.text-2xl{font-size:1.5rem;line-height:2rem}
|
||||
.font-bold{font-weight:700}
|
||||
|
||||
/* Opacity */
|
||||
.opacity-70{opacity:.7}
|
||||
|
||||
/* Effects */
|
||||
.shadow{--tw-shadow:0 1px 3px 0 rgba(0,0,0,0.1),0 1px 2px -1px rgba(0,0,0,0.1);box-shadow:var(--tw-shadow)}
|
||||
|
||||
/* Cursor */
|
||||
.cursor-pointer{cursor:pointer}
|
||||
|
||||
/* Box model */
|
||||
.box-border{box-sizing:border-box}
|
||||
|
||||
/* Utilities */
|
||||
.hover\:bg-transparent:hover{background-color:transparent}
|
||||
|
||||
/* Height for avatar images in header already inherit from container */
|
||||
app/web/package.json (new file): 18 lines
@@ -0,0 +1,18 @@
{
"name": "orly-web",
"version": "0.1.0",
"private": true,
"type": "module",
"scripts": {
"dev": "bun --hot --port 5173 public/dev.html",
"build": "rm -rf dist && bun build ./public/index.html --outdir ./dist --minify --splitting && cp -r public/tailwind.min.css dist/",
"preview": "bun x serve dist"
},
"dependencies": {
"react": "^18.2.0",
"react-dom": "^18.2.0"
},
"devDependencies": {
"bun-types": "latest"
}
}
app/web/public/dev.html (new file): 13 lines
@@ -0,0 +1,13 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Nostr Relay (Dev)</title>
<link rel="stylesheet" href="tailwind.min.css" />
</head>
<body class="bg-white">
<div id="root"></div>
<script type="module" src="/src/index.jsx"></script>
</body>
</html>
app/web/public/index.html (new file): 13 lines
@@ -0,0 +1,13 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Nostr Relay</title>
<link rel="stylesheet" href="tailwind.min.css" />
</head>
<body class="bg-white">
<div id="root"></div>
<script type="module" src="/src/index.jsx"></script>
</body>
</html>
app/web/public/tailwind.min.css (vendored, new file): 82 lines
@@ -0,0 +1,82 @@
|
||||
/*
|
||||
Local Tailwind CSS (minimal subset for this UI)
|
||||
Note: This file includes just the utilities used by the app to keep size small.
|
||||
You can replace this with a full Tailwind build if desired.
|
||||
*/
|
||||
|
||||
/* Preflight-like resets (very minimal) */
|
||||
*,::before,::after{box-sizing:border-box;border-width:0;border-style:solid;border-color:#e5e7eb}
|
||||
html,body,#root{height:100%}
|
||||
html{line-height:1.5;-webkit-text-size-adjust:100%;tab-size:4;font-family:ui-sans-serif,system-ui,-apple-system,Segoe UI,Roboto,Helvetica,Arial,Noto Sans,\"Apple Color Emoji\",\"Segoe UI Emoji\"}
|
||||
body{margin:0}
|
||||
button,input{font:inherit;color:inherit}
|
||||
img{display:block;max-width:100%;height:auto}
|
||||
|
||||
/* Layout */
|
||||
.sticky{position:sticky}.relative{position:relative}.absolute{position:absolute}
|
||||
.top-0{top:0}.left-0{left:0}.inset-0{top:0;right:0;bottom:0;left:0}
|
||||
.z-50{z-index:50}.z-10{z-index:10}
|
||||
.block{display:block}.flex{display:flex}
|
||||
.items-center{align-items:center}.justify-start{justify-content:flex-start}.justify-center{justify-content:center}.justify-end{justify-content:flex-end}
|
||||
.flex-grow{flex-grow:1}.shrink-0{flex-shrink:0}
|
||||
.overflow-hidden{overflow:hidden}
|
||||
|
||||
/* Sizing */
|
||||
.w-full{width:100%}.w-auto{width:auto}.w-16{width:4rem}
|
||||
.h-full{height:100%}.h-16{height:4rem}
|
||||
.aspect-square{aspect-ratio:1/1}
|
||||
.max-w-3xl{max-width:48rem}
|
||||
|
||||
/* Spacing */
|
||||
.p-0{padding:0}.p-2{padding:.5rem}.p-3{padding:.75rem}.p-6{padding:1.5rem}
|
||||
.px-2{padding-left:.5rem;padding-right:.5rem}
|
||||
.mr-0{margin-right:0}.mr-2{margin-right:.5rem}
|
||||
.mt-2{margin-top:.5rem}.mt-5{margin-top:1.25rem}
|
||||
.mb-1{margin-bottom:.25rem}.mb-2{margin-bottom:.5rem}.mb-4{margin-bottom:1rem}.mb-5{margin-bottom:1.25rem}
|
||||
.mx-auto{margin-left:auto;margin-right:auto}
|
||||
|
||||
/* Borders & Radius */
|
||||
.rounded{border-radius:.25rem}.rounded-full{border-radius:9999px}
|
||||
.border-0{border-width:0}.border-2{border-width:2px}
|
||||
.border-white{border-color:#fff}
|
||||
.border{border-width:1px}.border-gray-300{border-color:#d1d5db}
|
||||
|
||||
/* Colors / Backgrounds */
|
||||
.bg-white{background-color:#fff}
|
||||
.bg-gray-100{background-color:#f3f4f6}
|
||||
.bg-blue-600{background-color:#2563eb}.hover\:bg-blue-700:hover{background-color:#1d4ed8}
|
||||
.bg-red-600{background-color:#dc2626}.hover\:bg-red-700:hover{background-color:#b91c1c}
|
||||
.bg-cyan-100{background-color:#cffafe}
|
||||
.bg-green-100{background-color:#d1fae5}
|
||||
.bg-red-100{background-color:#fee2e2}
|
||||
.bg-cover{background-size:cover}.bg-center{background-position:center}
|
||||
.bg-transparent{background-color:transparent}
|
||||
|
||||
/* Text */
|
||||
.text-left{text-align:left}
|
||||
.text-white{color:#fff}
|
||||
.text-gray-500{color:#6b7280}.hover\:text-gray-800:hover{color:#1f2937}
|
||||
.text-green-800{color:#065f46}
|
||||
.text-red-800{color:#991b1b}
|
||||
.text-cyan-800{color:#155e75}
|
||||
.text-base{font-size:1rem;line-height:1.5rem}
|
||||
.text-lg{font-size:1.125rem;line-height:1.75rem}
|
||||
.text-2xl{font-size:1.5rem;line-height:2rem}
|
||||
.font-bold{font-weight:700}
|
||||
|
||||
/* Opacity */
|
||||
.opacity-70{opacity:.7}
|
||||
|
||||
/* Effects */
|
||||
.shadow{--tw-shadow:0 1px 3px 0 rgba(0,0,0,0.1),0 1px 2px -1px rgba(0,0,0,0.1);box-shadow:var(--tw-shadow)}
|
||||
|
||||
/* Cursor */
|
||||
.cursor-pointer{cursor:pointer}
|
||||
|
||||
/* Box model */
|
||||
.box-border{box-sizing:border-box}
|
||||
|
||||
/* Utilities */
|
||||
.hover\:bg-transparent:hover{background-color:transparent}
|
||||
|
||||
/* Avatar image heights in the header already inherit from their container */
|
||||
511
app/web/src/App.jsx
Normal file
@@ -0,0 +1,511 @@
|
||||
import React, { useState, useEffect, useRef } from 'react';
|
||||
|
||||
function App() {
|
||||
const [user, setUser] = useState(null);
|
||||
const [status, setStatus] = useState('Ready to authenticate');
|
||||
const [statusType, setStatusType] = useState('info');
|
||||
const [profileData, setProfileData] = useState(null);
|
||||
|
||||
const [checkingAuth, setCheckingAuth] = useState(true);
|
||||
|
||||
// Login view layout measurements
|
||||
const titleRef = useRef(null);
|
||||
const fileInputRef = useRef(null);
|
||||
const [loginPadding, setLoginPadding] = useState(16); // default fallback padding in px
|
||||
|
||||
useEffect(() => {
|
||||
function updatePadding() {
|
||||
if (titleRef.current) {
|
||||
const h = titleRef.current.offsetHeight || 0;
|
||||
// Pad area around the text by half the title text height
|
||||
setLoginPadding(Math.max(0, Math.round(h / 2)));
|
||||
}
|
||||
}
|
||||
updatePadding();
|
||||
window.addEventListener('resize', updatePadding);
|
||||
return () => window.removeEventListener('resize', updatePadding);
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
// Check authentication status on page load
|
||||
(async () => {
|
||||
await checkStatus();
|
||||
setCheckingAuth(false);
|
||||
})();
|
||||
}, []);
|
||||
|
||||
// Effect to fetch profile when user changes
|
||||
useEffect(() => {
|
||||
if (user?.pubkey) {
|
||||
fetchUserProfile(user.pubkey);
|
||||
}
|
||||
}, [user?.pubkey]);
|
||||
|
||||
function relayURL() {
|
||||
try {
|
||||
return window.location.protocol.replace('http', 'ws') + '//' + window.location.host;
|
||||
} catch (_) {
|
||||
return 'ws://localhost:3333';
|
||||
}
|
||||
}
|
||||
|
||||
async function checkStatus() {
|
||||
try {
|
||||
const response = await fetch('/api/auth/status');
|
||||
const data = await response.json();
|
||||
if (data.authenticated && data.pubkey) {
|
||||
// Fetch permission first, then set user and profile
|
||||
try {
|
||||
const permResponse = await fetch(`/api/permissions/${data.pubkey}`);
|
||||
const permData = await permResponse.json();
|
||||
if (permData && permData.permission) {
|
||||
const fullUser = { pubkey: data.pubkey, permission: permData.permission };
|
||||
setUser(fullUser);
|
||||
updateStatus(`Already authenticated as: ${data.pubkey.slice(0, 16)}...`, 'success');
|
||||
// Fire and forget profile fetch
|
||||
fetchUserProfile(data.pubkey);
|
||||
}
|
||||
} catch (_) {
|
||||
// ignore permission fetch errors
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
// Ignore errors for status check
|
||||
}
|
||||
}
|
||||
|
||||
function updateStatus(message, type = 'info') {
|
||||
setStatus(message);
|
||||
setStatusType(type);
|
||||
}
|
||||
|
||||
function statusClassName() {
|
||||
const base = 'mt-5 mb-5 p-3 rounded';
|
||||
switch (statusType) {
|
||||
case 'success':
|
||||
return base + ' bg-green-100 text-green-800';
|
||||
case 'error':
|
||||
return base + ' bg-red-100 text-red-800';
|
||||
case 'info':
|
||||
default:
|
||||
return base + ' bg-cyan-100 text-cyan-800';
|
||||
}
|
||||
}
|
||||
|
||||
async function getChallenge() {
|
||||
try {
|
||||
const response = await fetch('/api/auth/challenge');
|
||||
const data = await response.json();
|
||||
return data.challenge;
|
||||
} catch (error) {
|
||||
updateStatus('Failed to get authentication challenge: ' + error.message, 'error');
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async function loginWithExtension() {
|
||||
if (!window.nostr) {
|
||||
updateStatus('No Nostr extension found. Please install a NIP-07 compatible extension like nos2x or Alby.', 'error');
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
updateStatus('Connecting to extension...', 'info');
|
||||
|
||||
// Get public key from extension
|
||||
const pubkey = await window.nostr.getPublicKey();
|
||||
|
||||
// Get challenge from server
|
||||
const challenge = await getChallenge();
|
||||
|
||||
// Create authentication event
|
||||
const authEvent = {
|
||||
kind: 22242,
|
||||
created_at: Math.floor(Date.now() / 1000),
|
||||
tags: [
|
||||
['relay', relayURL()],
|
||||
['challenge', challenge]
|
||||
],
|
||||
content: ''
|
||||
};
|
||||
|
||||
// Sign the event with extension
|
||||
const signedEvent = await window.nostr.signEvent(authEvent);
|
||||
|
||||
// Send to server
|
||||
await authenticate(signedEvent);
|
||||
|
||||
} catch (error) {
|
||||
updateStatus('Extension login failed: ' + error.message, 'error');
|
||||
}
|
||||
}
|
||||
|
||||
async function fetchKind0FromRelay(pubkeyHex, timeoutMs = 4000) {
|
||||
return new Promise((resolve) => {
|
||||
let resolved = false;
|
||||
let events = [];
|
||||
let ws;
|
||||
try {
|
||||
ws = new WebSocket(relayURL());
|
||||
} catch (e) {
|
||||
resolve(null);
|
||||
return;
|
||||
}
|
||||
|
||||
const subId = 'profile-' + Math.random().toString(36).slice(2);
|
||||
const timer = setTimeout(() => {
|
||||
if (ws && ws.readyState === 1) {
|
||||
try { ws.close(); } catch (_) {}
|
||||
}
|
||||
if (!resolved) {
|
||||
resolved = true;
|
||||
resolve(null);
|
||||
}
|
||||
}, timeoutMs);
|
||||
|
||||
ws.onopen = () => {
|
||||
try {
|
||||
const req = [
|
||||
'REQ',
|
||||
subId,
|
||||
{ kinds: [0], authors: [pubkeyHex] }
|
||||
];
|
||||
ws.send(JSON.stringify(req));
|
||||
} catch (_) {}
|
||||
};
|
||||
|
||||
ws.onmessage = (msg) => {
|
||||
try {
|
||||
const data = JSON.parse(msg.data);
|
||||
const type = data[0];
|
||||
if (type === 'EVENT' && data[1] === subId) {
|
||||
const event = data[2];
|
||||
if (event && event.kind === 0 && event.content) {
|
||||
events.push(event);
|
||||
}
|
||||
} else if (type === 'EOSE' && data[1] === subId) {
|
||||
try {
|
||||
ws.send(JSON.stringify(['CLOSE', subId]));
|
||||
} catch (_) {}
|
||||
try { ws.close(); } catch (_) {}
|
||||
clearTimeout(timer);
|
||||
if (!resolved) {
|
||||
resolved = true;
|
||||
if (events.length) {
|
||||
const latest = events.reduce((a, b) => (a.created_at > b.created_at ? a : b));
|
||||
try {
|
||||
const meta = JSON.parse(latest.content);
|
||||
resolve(meta || null);
|
||||
} catch (_) {
|
||||
resolve(null);
|
||||
}
|
||||
} else {
|
||||
resolve(null);
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (_) {
|
||||
// ignore malformed messages
|
||||
}
|
||||
};
|
||||
|
||||
ws.onerror = () => {
|
||||
try { ws.close(); } catch (_) {}
|
||||
clearTimeout(timer);
|
||||
if (!resolved) {
|
||||
resolved = true;
|
||||
resolve(null);
|
||||
}
|
||||
};
|
||||
|
||||
ws.onclose = () => {
|
||||
clearTimeout(timer);
|
||||
if (!resolved) {
|
||||
resolved = true;
|
||||
if (events.length) {
|
||||
const latest = events.reduce((a, b) => (a.created_at > b.created_at ? a : b));
|
||||
try {
|
||||
const meta = JSON.parse(latest.content);
|
||||
resolve(meta || null);
|
||||
} catch (_) {
|
||||
resolve(null);
|
||||
}
|
||||
} else {
|
||||
resolve(null);
|
||||
}
|
||||
}
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
// Function to fetch user profile metadata (kind 0)
|
||||
async function fetchUserProfile(pubkeyHex) {
|
||||
try {
|
||||
// Create a simple placeholder with the pubkey
|
||||
const placeholderProfile = {
|
||||
name: `user:${pubkeyHex.slice(0, 8)}`,
|
||||
about: 'No profile data available'
|
||||
};
|
||||
|
||||
// Always set the placeholder profile first
|
||||
setProfileData(placeholderProfile);
|
||||
|
||||
// First, try to get profile kind:0 from the relay itself
|
||||
let relayMetadata = null;
|
||||
try {
|
||||
relayMetadata = await fetchKind0FromRelay(pubkeyHex);
|
||||
} catch (_) {}
|
||||
|
||||
if (relayMetadata) {
|
||||
const parsed = typeof relayMetadata === 'string' ? JSON.parse(relayMetadata) : relayMetadata;
|
||||
setProfileData({
|
||||
name: parsed.name || placeholderProfile.name,
|
||||
display_name: parsed.display_name,
|
||||
picture: parsed.picture,
|
||||
banner: parsed.banner,
|
||||
about: parsed.about || placeholderProfile.about
|
||||
});
|
||||
return parsed;
|
||||
}
|
||||
|
||||
// Fallback: try extension metadata if available
|
||||
if (window.nostr && window.nostr.getPublicKey) {
|
||||
try {
|
||||
if (window.nostr.getUserMetadata) {
|
||||
const metadata = await window.nostr.getUserMetadata();
|
||||
if (metadata) {
|
||||
try {
|
||||
const parsedMetadata = typeof metadata === 'string' ? JSON.parse(metadata) : metadata;
|
||||
setProfileData({
|
||||
name: parsedMetadata.name || placeholderProfile.name,
|
||||
display_name: parsedMetadata.display_name,
|
||||
picture: parsedMetadata.picture,
|
||||
banner: parsedMetadata.banner,
|
||||
about: parsedMetadata.about || placeholderProfile.about
|
||||
});
|
||||
return parsedMetadata;
|
||||
} catch (parseError) {
|
||||
console.log('Error parsing user metadata:', parseError);
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (nostrError) {
|
||||
console.log('Could not get profile from extension:', nostrError);
|
||||
}
|
||||
}
|
||||
|
||||
return placeholderProfile;
|
||||
} catch (error) {
|
||||
console.error('Error handling profile data:', error);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
async function authenticate(signedEvent) {
|
||||
try {
|
||||
const response = await fetch('/api/auth/login', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify(signedEvent)
|
||||
});
|
||||
|
||||
const result = await response.json();
|
||||
|
||||
if (result.success) {
|
||||
setUser(result.pubkey);
|
||||
updateStatus('Successfully authenticated as: ' + result.pubkey.slice(0, 16) + '...', 'success');
|
||||
|
||||
// Check permissions after login
|
||||
const permResponse = await fetch(`/api/permissions/${result.pubkey}`);
|
||||
const permData = await permResponse.json();
|
||||
if (permData && permData.permission) {
|
||||
setUser({pubkey: result.pubkey, permission: permData.permission});
|
||||
|
||||
// Fetch user profile data
|
||||
await fetchUserProfile(result.pubkey);
|
||||
}
|
||||
} else {
|
||||
updateStatus('Authentication failed: ' + result.error, 'error');
|
||||
}
|
||||
} catch (error) {
|
||||
updateStatus('Authentication request failed: ' + error.message, 'error');
|
||||
}
|
||||
}
|
||||
|
||||
async function logout() {
|
||||
try {
|
||||
await fetch('/api/auth/logout', { method: 'POST' });
|
||||
} catch (_) {}
|
||||
setUser(null);
|
||||
updateStatus('Logged out', 'info');
|
||||
}
|
||||
|
||||
function handleImportButton() {
|
||||
try {
|
||||
fileInputRef?.current?.click();
|
||||
} catch (_) {}
|
||||
}
|
||||
|
||||
async function handleImportChange(e) {
|
||||
const file = e?.target?.files && e.target.files[0];
|
||||
if (!file) return;
|
||||
try {
|
||||
updateStatus('Uploading import file...', 'info');
|
||||
const fd = new FormData();
|
||||
fd.append('file', file);
|
||||
const res = await fetch('/api/import', { method: 'POST', body: fd });
|
||||
if (res.ok) {
|
||||
updateStatus('Import started. Processing will continue in the background.', 'success');
|
||||
} else {
|
||||
const txt = await res.text();
|
||||
updateStatus('Import failed: ' + txt, 'error');
|
||||
}
|
||||
} catch (err) {
|
||||
updateStatus('Import failed: ' + (err?.message || String(err)), 'error');
|
||||
} finally {
|
||||
// reset input so selecting the same file again works
|
||||
if (e && e.target) e.target.value = '';
|
||||
}
|
||||
}
|
||||
|
||||
// Prevent UI flash: wait until we checked auth status
|
||||
if (checkingAuth) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return (
|
||||
<>
|
||||
{user?.permission ? (
|
||||
<>
|
||||
{/* Logged in view with user profile */}
|
||||
<div className="sticky top-0 left-0 w-full bg-gray-100 z-50 h-16 flex items-center overflow-hidden">
|
||||
<div className="flex items-center h-full w-full box-border">
|
||||
<div className="relative overflow-hidden flex flex-grow items-center justify-start h-full">
|
||||
{profileData?.banner && (
|
||||
<div className="absolute inset-0 opacity-70 bg-cover bg-center" style={{ backgroundImage: `url(${profileData.banner})` }}></div>
|
||||
)}
|
||||
<div className="relative z-10 p-2 flex items-center h-full">
|
||||
{profileData?.picture && <img src={profileData.picture} alt="User Avatar" className="h-full aspect-square w-auto rounded-full object-cover border-2 border-white mr-2 shadow box-border" />}
|
||||
<div>
|
||||
<div className="font-bold text-base block">
|
||||
{profileData?.display_name || profileData?.name || user.pubkey.slice(0, 8)}
|
||||
{profileData?.name && profileData?.display_name && ` (${profileData.name})`}
|
||||
</div>
|
||||
<div className="font-bold text-lg text-left">
|
||||
{user.permission === "admin" ? "Admin Dashboard" : "Subscriber Dashboard"}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div className="flex items-center justify-end shrink-0 h-full">
|
||||
<button className="bg-transparent text-gray-500 border-0 text-2xl cursor-pointer flex items-center justify-center h-full aspect-square shrink-0 hover:bg-transparent hover:text-gray-800" onClick={logout}>✕</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{/* Hidden file input for import (admin) */}
|
||||
<input
|
||||
type="file"
|
||||
ref={fileInputRef}
|
||||
onChange={handleImportChange}
|
||||
accept=".json,.jsonl,text/plain,application/x-ndjson,application/json"
|
||||
style={{ display: 'none' }}
|
||||
/>
|
||||
<div className="p-2 w-full bg-white border-b border-gray-200">
|
||||
<div className="text-lg font-bold flex items-center">Welcome</div>
|
||||
<p>here you can configure all the things</p>
|
||||
</div>
|
||||
|
||||
{/* Export only my events */}
|
||||
<div className="p-2 bg-white rounded w-full">
|
||||
<div className="w-full flex items-center justify-between">
|
||||
<div className="pr-2 w-full">
|
||||
<div className="text-base font-bold mb-1">Export My Events</div>
|
||||
<p className="text-sm w-full text-gray-700">Download your own events as line-delimited JSON (JSONL/NDJSON). Only events you authored will be included.</p>
|
||||
</div>
|
||||
<button
|
||||
className="bg-gray-100 text-gray-500 border-0 text-2xl cursor-pointer flex items-center justify-center h-full aspect-square shrink-0 hover:bg-transparent hover:text-gray-800"
|
||||
onClick={() => { window.location.href = '/api/export/mine'; }}
|
||||
aria-label="Download my events as JSONL"
|
||||
title="Download my events"
|
||||
>
|
||||
⤓
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{user.permission === "admin" && (
|
||||
<>
|
||||
<div className="p-2 w-full rounded">
|
||||
<div className="flex items-center justify-between">
|
||||
<div className="pr-2 w-full">
|
||||
<div className="text-base font-bold mb-1">Export All Events (admin)</div>
|
||||
<p className="text-sm text-gray-700">Download all stored events as line-delimited JSON (JSONL/NDJSON). This may take a while on large databases.</p>
|
||||
</div>
|
||||
<button
|
||||
className="bg-gray-100 text-gray-500 border-0 text-2xl cursor-pointer flex items-center justify-center h-full aspect-square shrink-0 hover:bg-transparent hover:text-gray-800"
|
||||
onClick={() => { window.location.href = '/api/export'; }}
|
||||
aria-label="Download all events as JSONL"
|
||||
title="Download all events"
|
||||
>
|
||||
⤓
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
<div className="p-2 w-full rounded">
|
||||
<div className="flex items-center justify-between">
|
||||
<div className="pr-2 w-full">
|
||||
<div className="text-base font-bold mb-1">Import Events (admin)</div>
|
||||
<p className="text-sm text-gray-700">Upload events in line-delimited JSON (JSONL/NDJSON) to import into the database.</p>
|
||||
</div>
|
||||
<button
|
||||
className="bg-gray-100 text-gray-500 border-0 text-2xl cursor-pointer flex items-center justify-center h-full aspect-square shrink-0 hover:bg-transparent hover:text-gray-800"
|
||||
onClick={handleImportButton}
|
||||
aria-label="Import events from JSONL"
|
||||
title="Import events"
|
||||
>
|
||||
↥
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
</>
|
||||
) : (
|
||||
// Not logged in view - shows the login form
|
||||
<div className="w-full h-full flex items-center justify-center">
|
||||
<div
|
||||
className="bg-gray-100"
|
||||
style={{ width: '800px', maxWidth: '100%', boxSizing: 'border-box', padding: `${loginPadding}px` }}
|
||||
>
|
||||
<div className="flex items-center gap-3 mb-3">
|
||||
<img
|
||||
src="/orly.png"
|
||||
alt="Orly logo"
|
||||
className="object-contain"
|
||||
style={{ width: '4rem', height: '4rem' }}
|
||||
onError={(e) => {
|
||||
// fallback to repo docs image if public asset missing
|
||||
e.currentTarget.onerror = null;
|
||||
e.currentTarget.src = "/docs/orly.png";
|
||||
}}
|
||||
/>
|
||||
<h1 ref={titleRef} className="text-2xl font-bold p-2">ORLY🦉 Dashboard Login</h1>
|
||||
</div>
|
||||
|
||||
<p className="mb-4">Authenticate to this Nostr relay using your browser extension.</p>
|
||||
|
||||
<div className={statusClassName()}>
|
||||
{status}
|
||||
</div>
|
||||
|
||||
<div className="mb-5">
|
||||
<button className="bg-blue-600 text-white px-5 py-3 rounded hover:bg-blue-700" onClick={loginWithExtension}>Login with Browser Extension (NIP-07)</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
export default App;
|
||||
11
app/web/src/index.jsx
Normal file
@@ -0,0 +1,11 @@
|
||||
import React from 'react';
|
||||
import { createRoot } from 'react-dom/client';
|
||||
import App from './App';
|
||||
import './styles.css';
|
||||
|
||||
const root = createRoot(document.getElementById('root'));
|
||||
root.render(
|
||||
<React.StrictMode>
|
||||
<App />
|
||||
</React.StrictMode>
|
||||
);
|
||||
191
app/web/src/styles.css
Normal file
@@ -0,0 +1,191 @@
|
||||
body {
|
||||
font-family: Arial, sans-serif;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
.container {
|
||||
background: #f9f9f9;
|
||||
padding: 30px;
|
||||
border-radius: 8px;
|
||||
margin-top: 20px; /* Reduced space since header is now sticky */
|
||||
}
|
||||
|
||||
.form-group {
|
||||
margin-bottom: 20px;
|
||||
}
|
||||
|
||||
label {
|
||||
display: block;
|
||||
margin-bottom: 5px;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
input, textarea {
|
||||
width: 100%;
|
||||
padding: 10px;
|
||||
border: 1px solid #ddd;
|
||||
border-radius: 4px;
|
||||
}
|
||||
|
||||
button {
|
||||
background: #007cba;
|
||||
color: white;
|
||||
padding: 12px 20px;
|
||||
border: none;
|
||||
border-radius: 4px;
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
button:hover {
|
||||
background: #005a87;
|
||||
}
|
||||
|
||||
.danger-button {
|
||||
background: #dc3545;
|
||||
}
|
||||
|
||||
.danger-button:hover {
|
||||
background: #c82333;
|
||||
}
|
||||
|
||||
.status {
|
||||
margin-top: 20px;
|
||||
margin-bottom: 20px;
|
||||
padding: 10px;
|
||||
border-radius: 4px;
|
||||
}
|
||||
|
||||
.success {
|
||||
background: #d4edda;
|
||||
color: #155724;
|
||||
}
|
||||
|
||||
.error {
|
||||
background: #f8d7da;
|
||||
color: #721c24;
|
||||
}
|
||||
|
||||
.info {
|
||||
background: #d1ecf1;
|
||||
color: #0c5460;
|
||||
}
|
||||
|
||||
.header-panel {
|
||||
position: sticky;
|
||||
top: 0;
|
||||
left: 0;
|
||||
width: 100%;
|
||||
background-color: #f8f9fa;
|
||||
box-shadow: 0 2px 4px rgba(0,0,0,0.1);
|
||||
z-index: 1000;
|
||||
height: 60px;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
background-size: cover;
|
||||
background-position: center;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.header-content {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
height: 100%;
|
||||
padding: 0 0 0 12px;
|
||||
width: 100%;
|
||||
margin: 0 auto;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
.header-left {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: flex-start;
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
.header-center {
|
||||
display: flex;
|
||||
flex-grow: 1;
|
||||
align-items: center;
|
||||
justify-content: flex-start;
|
||||
position: relative;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.header-right {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: flex-end;
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
.header-logo {
|
||||
height: 100%;
|
||||
aspect-ratio: 1 / 1;
|
||||
width: auto;
|
||||
border-radius: 0;
|
||||
object-fit: cover;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.user-avatar {
|
||||
width: 40px;
|
||||
height: 40px;
|
||||
border-radius: 50%;
|
||||
object-fit: cover;
|
||||
border: 2px solid white;
|
||||
margin-right: 10px;
|
||||
box-shadow: 0 1px 3px rgba(0,0,0,0.2);
|
||||
}
|
||||
|
||||
.user-profile {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
position: relative;
|
||||
z-index: 1;
|
||||
}
|
||||
|
||||
.user-info {
|
||||
font-weight: bold;
|
||||
font-size: 1.2em;
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
.user-name {
|
||||
font-weight: bold;
|
||||
font-size: 1em;
|
||||
display: block;
|
||||
}
|
||||
|
||||
.profile-banner {
|
||||
position: absolute;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
top: 0;
|
||||
left: 0;
|
||||
z-index: -1;
|
||||
opacity: 0.7;
|
||||
}
|
||||
|
||||
.logout-button {
|
||||
background: transparent;
|
||||
color: #6c757d;
|
||||
border: none;
|
||||
font-size: 20px;
|
||||
cursor: pointer;
|
||||
padding: 0;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
width: 48px;
|
||||
height: 100%;
|
||||
margin-left: 10px;
|
||||
margin-right: 0;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.logout-button:hover {
|
||||
background: transparent;
|
||||
color: #343a40;
|
||||
}
|
||||
11
main.go
@@ -18,6 +18,7 @@ import (
|
||||
"next.orly.dev/app/config"
|
||||
"next.orly.dev/pkg/acl"
|
||||
"next.orly.dev/pkg/database"
|
||||
"next.orly.dev/pkg/spider"
|
||||
"next.orly.dev/pkg/version"
|
||||
)
|
||||
|
||||
@@ -158,6 +159,12 @@ func main() {
|
||||
}
|
||||
acl.Registry.Syncer()
|
||||
|
||||
// Initialize and start spider functionality if enabled
|
||||
spiderCtx, spiderCancel := context.WithCancel(ctx)
|
||||
spiderInstance := spider.New(db, cfg, spiderCtx, spiderCancel)
|
||||
spiderInstance.Start()
|
||||
defer spiderInstance.Stop()
|
||||
|
||||
// Start HTTP pprof server if enabled
|
||||
if cfg.PprofHTTP {
|
||||
pprofAddr := fmt.Sprintf("%s:%d", cfg.Listen, 6060)
|
||||
@@ -254,9 +261,13 @@ func main() {
|
||||
fmt.Printf("\r")
|
||||
cancel()
|
||||
chk.E(db.Close())
|
||||
log.I.F("exiting")
|
||||
return
|
||||
case <-quit:
|
||||
cancel()
|
||||
chk.E(db.Close())
|
||||
log.I.F("exiting")
|
||||
return
|
||||
}
|
||||
}
|
||||
log.I.F("exiting")
|
||||
|
||||
@@ -360,6 +360,16 @@ func (f *Follows) Syncer() {
|
||||
f.updated <- struct{}{}
|
||||
}
|
||||
|
||||
// GetFollowedPubkeys returns a copy of the followed pubkeys list
|
||||
func (f *Follows) GetFollowedPubkeys() [][]byte {
|
||||
f.followsMx.RLock()
|
||||
defer f.followsMx.RUnlock()
|
||||
|
||||
followedPubkeys := make([][]byte, len(f.follows))
|
||||
copy(followedPubkeys, f.follows)
|
||||
return followedPubkeys
|
||||
}
|
||||
|
||||
func init() {
|
||||
log.T.F("registering follows ACL")
|
||||
Registry.Register(new(Follows))
|
||||
|
||||
62
pkg/database/markers.go
Normal file
@@ -0,0 +1,62 @@
|
||||
package database
|
||||
|
||||
import (
|
||||
"github.com/dgraph-io/badger/v4"
|
||||
"lol.mleku.dev/chk"
|
||||
)
|
||||
|
||||
const (
|
||||
markerPrefix = "MARKER:"
|
||||
)
|
||||
|
||||
// SetMarker stores an arbitrary marker in the database
|
||||
func (d *D) SetMarker(key string, value []byte) (err error) {
|
||||
markerKey := []byte(markerPrefix + key)
|
||||
|
||||
err = d.Update(func(txn *badger.Txn) error {
|
||||
return txn.Set(markerKey, value)
|
||||
})
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// GetMarker retrieves an arbitrary marker from the database
|
||||
func (d *D) GetMarker(key string) (value []byte, err error) {
|
||||
markerKey := []byte(markerPrefix + key)
|
||||
|
||||
err = d.View(func(txn *badger.Txn) error {
|
||||
item, err := txn.Get(markerKey)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
value, err = item.ValueCopy(nil)
|
||||
return err
|
||||
})
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// HasMarker checks if a marker exists in the database
|
||||
func (d *D) HasMarker(key string) (exists bool) {
|
||||
markerKey := []byte(markerPrefix + key)
|
||||
|
||||
err := d.View(func(txn *badger.Txn) error {
|
||||
_, err := txn.Get(markerKey)
|
||||
return err
|
||||
})
|
||||
|
||||
exists = !chk.E(err)
|
||||
return
|
||||
}
|
||||
|
||||
// DeleteMarker removes a marker from the database
|
||||
func (d *D) DeleteMarker(key string) (err error) {
|
||||
markerKey := []byte(markerPrefix + key)
|
||||
|
||||
err = d.Update(func(txn *badger.Txn) error {
|
||||
return txn.Delete(markerKey)
|
||||
})
|
||||
|
||||
return
|
||||
}
|
||||
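Usage note: the following is a minimal sketch of the marker API introduced above, in the same pattern the spider code later in this change uses. The package name, function name, and marker key are illustrative only; it assumes an already-opened *database.D handle and the same log/strconv/time imports used elsewhere in this change.

package example

import (
	"strconv"
	"time"

	"lol.mleku.dev/log"
	"next.orly.dev/pkg/database"
)

// markerSketch checks for a marker, stores a unix-timestamp payload, reads it
// back, and finally deletes it again.
func markerSketch(db *database.D) {
	const key = "example_marker" // illustrative key, not used by the relay
	if !db.HasMarker(key) {
		ts := strconv.FormatInt(time.Now().Unix(), 10)
		if err := db.SetMarker(key, []byte(ts)); err != nil {
			log.E.F("failed to set marker %s: %v", key, err)
		}
	}
	if v, err := db.GetMarker(key); err == nil {
		log.I.F("marker %s = %s", key, v)
	}
	_ = db.DeleteMarker(key)
}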
@@ -24,7 +24,8 @@ func NewSWithCap(c int) (s *S) {
|
||||
|
||||
func (s *S) Len() int {
|
||||
if s == nil {
|
||||
panic("tags cannot be used without initialization")
|
||||
return 0
|
||||
// panic("tags cannot be used without initialization")
|
||||
}
|
||||
return len(*s)
|
||||
}
|
||||
|
||||
@@ -66,6 +66,10 @@ func UnmarshalQuoted(b []byte) (content, rem []byte, err error) {
|
||||
}
|
||||
rem = b[:]
|
||||
for ; len(rem) >= 0; rem = rem[1:] {
|
||||
if len(rem) == 0 {
|
||||
err = io.EOF
|
||||
return
|
||||
}
|
||||
// advance to open quotes
|
||||
if rem[0] == '"' {
|
||||
rem = rem[1:]
|
||||
|
||||
372
pkg/spider/spider.go
Normal file
@@ -0,0 +1,372 @@
|
||||
package spider
|
||||
|
||||
import (
|
||||
"context"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"lol.mleku.dev/chk"
|
||||
"lol.mleku.dev/log"
|
||||
"next.orly.dev/app/config"
|
||||
"next.orly.dev/pkg/acl"
|
||||
"next.orly.dev/pkg/database"
|
||||
"next.orly.dev/pkg/database/indexes/types"
|
||||
"next.orly.dev/pkg/encoders/filter"
|
||||
"next.orly.dev/pkg/encoders/kind"
|
||||
"next.orly.dev/pkg/encoders/tag"
|
||||
"next.orly.dev/pkg/encoders/timestamp"
|
||||
"next.orly.dev/pkg/protocol/ws"
|
||||
"next.orly.dev/pkg/utils/normalize"
|
||||
)
|
||||
|
||||
const (
|
||||
OneTimeSpiderSyncMarker = "spider_one_time_sync_completed"
|
||||
SpiderLastScanMarker = "spider_last_scan_time"
|
||||
)
|
||||
|
||||
type Spider struct {
|
||||
db *database.D
|
||||
cfg *config.C
|
||||
ctx context.Context
|
||||
cancel context.CancelFunc
|
||||
}
|
||||
|
||||
func New(
|
||||
db *database.D, cfg *config.C, ctx context.Context,
|
||||
cancel context.CancelFunc,
|
||||
) *Spider {
|
||||
return &Spider{
|
||||
db: db,
|
||||
cfg: cfg,
|
||||
ctx: ctx,
|
||||
cancel: cancel,
|
||||
}
|
||||
}
|
||||
|
||||
// Start initializes the spider functionality based on configuration
|
||||
func (s *Spider) Start() {
|
||||
if s.cfg.SpiderMode != "follows" {
|
||||
log.D.Ln("Spider mode is not set to 'follows', skipping spider functionality")
|
||||
return
|
||||
}
|
||||
|
||||
log.I.Ln("Starting spider in follow mode")
|
||||
|
||||
// Check if one-time sync has been completed
|
||||
if !s.db.HasMarker(OneTimeSpiderSyncMarker) {
|
||||
log.I.Ln("Performing one-time spider sync back one month")
|
||||
go s.performOneTimeSync()
|
||||
} else {
|
||||
log.D.Ln("One-time spider sync already completed, skipping")
|
||||
}
|
||||
|
||||
// Start periodic scanning
|
||||
go s.startPeriodicScanning()
|
||||
}
|
||||
|
||||
// performOneTimeSync performs the initial sync going back one month
|
||||
func (s *Spider) performOneTimeSync() {
|
||||
defer func() {
|
||||
// Mark the one-time sync as completed
|
||||
timestamp := strconv.FormatInt(time.Now().Unix(), 10)
|
||||
if err := s.db.SetMarker(
|
||||
OneTimeSpiderSyncMarker, []byte(timestamp),
|
||||
); err != nil {
|
||||
log.E.F("Failed to set one-time sync marker: %v", err)
|
||||
} else {
|
||||
log.I.Ln("One-time spider sync completed and marked")
|
||||
}
|
||||
}()
|
||||
|
||||
// Calculate the time one month ago
|
||||
oneMonthAgo := time.Now().AddDate(0, -1, 0)
|
||||
log.I.F("Starting one-time spider sync from %v", oneMonthAgo)
|
||||
|
||||
// Perform the sync over the one-month window (see performSync below)
|
||||
if err := s.performSync(oneMonthAgo, time.Now()); err != nil {
|
||||
log.E.F("One-time spider sync failed: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
log.I.Ln("One-time spider sync completed successfully")
|
||||
}
|
||||
|
||||
// startPeriodicScanning starts the regular scanning process
|
||||
func (s *Spider) startPeriodicScanning() {
|
||||
ticker := time.NewTicker(s.cfg.SpiderFrequency)
|
||||
defer ticker.Stop()
|
||||
|
||||
log.I.F("Starting periodic spider scanning every %v", s.cfg.SpiderFrequency)
|
||||
|
||||
for {
|
||||
select {
|
||||
case <-s.ctx.Done():
|
||||
log.D.Ln("Spider periodic scanning stopped due to context cancellation")
|
||||
return
|
||||
case <-ticker.C:
|
||||
s.performPeriodicScan()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// performPeriodicScan performs the regular scan over a window of double the configured frequency (e.g. the last two hours for a one-hour frequency)
|
||||
func (s *Spider) performPeriodicScan() {
|
||||
// Calculate the scanning window (double the frequency period)
|
||||
scanWindow := s.cfg.SpiderFrequency * 2
|
||||
scanStart := time.Now().Add(-scanWindow)
|
||||
scanEnd := time.Now()
|
||||
|
||||
log.D.F(
|
||||
"Performing periodic spider scan from %v to %v (window: %v)", scanStart,
|
||||
scanEnd, scanWindow,
|
||||
)
|
||||
|
||||
if err := s.performSync(scanStart, scanEnd); err != nil {
|
||||
log.E.F("Periodic spider scan failed: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
// Update the last scan marker
|
||||
timestamp := strconv.FormatInt(time.Now().Unix(), 10)
|
||||
if err := s.db.SetMarker(
|
||||
SpiderLastScanMarker, []byte(timestamp),
|
||||
); err != nil {
|
||||
log.E.F("Failed to update last scan marker: %v", err)
|
||||
}
|
||||
|
||||
log.D.F("Periodic spider scan completed successfully")
|
||||
}
|
||||
|
||||
// performSync performs the actual sync operation for the given time range
|
||||
func (s *Spider) performSync(startTime, endTime time.Time) error {
|
||||
log.D.F(
|
||||
"Spider sync from %v to %v - starting implementation", startTime,
|
||||
endTime,
|
||||
)
|
||||
|
||||
// 1. Check ACL mode is set to "follows"
|
||||
if s.cfg.ACLMode != "follows" {
|
||||
log.D.F(
|
||||
"Spider sync skipped - ACL mode is not 'follows' (current: %s)",
|
||||
s.cfg.ACLMode,
|
||||
)
|
||||
return nil
|
||||
}
|
||||
|
||||
// 2. Get the list of followed users from the ACL system
|
||||
followedPubkeys, err := s.getFollowedPubkeys()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if len(followedPubkeys) == 0 {
|
||||
log.D.Ln("Spider sync: no followed pubkeys found")
|
||||
return nil
|
||||
}
|
||||
|
||||
log.D.F("Spider sync: found %d followed pubkeys", len(followedPubkeys))
|
||||
|
||||
// 3. Discover relay lists from followed users
|
||||
relayURLs, err := s.discoverRelays(followedPubkeys)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if len(relayURLs) == 0 {
|
||||
log.W.Ln("Spider sync: no relays discovered from followed users")
|
||||
return nil
|
||||
}
|
||||
|
||||
log.I.F("Spider sync: discovered %d relay URLs", len(relayURLs))
|
||||
|
||||
// 4. Query each relay for events from followed pubkeys in the time range
|
||||
eventsFound := 0
|
||||
for _, relayURL := range relayURLs {
|
||||
count, err := s.queryRelayForEvents(
|
||||
relayURL, followedPubkeys, startTime, endTime,
|
||||
)
|
||||
if err != nil {
|
||||
log.E.F("Spider sync: error querying relay %s: %v", relayURL, err)
|
||||
continue
|
||||
}
|
||||
eventsFound += count
|
||||
}
|
||||
|
||||
log.I.F(
|
||||
"Spider sync completed: found %d new events from %d relays",
|
||||
eventsFound, len(relayURLs),
|
||||
)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// getFollowedPubkeys retrieves the list of followed pubkeys from the ACL system
|
||||
func (s *Spider) getFollowedPubkeys() ([][]byte, error) {
|
||||
// Access the ACL registry to get the current ACL instance
|
||||
var followedPubkeys [][]byte
|
||||
|
||||
// Get all ACL instances and find the active one
|
||||
for _, aclInstance := range acl.Registry.ACL {
|
||||
if aclInstance.Type() == acl.Registry.Active.Load() {
|
||||
// Cast to *Follows to access the follows field
|
||||
if followsACL, ok := aclInstance.(*acl.Follows); ok {
|
||||
followedPubkeys = followsACL.GetFollowedPubkeys()
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return followedPubkeys, nil
|
||||
}
|
||||
|
||||
// discoverRelays discovers relay URLs from kind 10002 events of followed users
|
||||
func (s *Spider) discoverRelays(followedPubkeys [][]byte) ([]string, error) {
|
||||
seen := make(map[string]struct{})
|
||||
var urls []string
|
||||
|
||||
for _, pubkey := range followedPubkeys {
|
||||
// Query for kind 10002 (RelayListMetadata) events from this pubkey
|
||||
fl := &filter.F{
|
||||
Authors: tag.NewFromAny(pubkey),
|
||||
Kinds: kind.NewS(kind.New(kind.RelayListMetadata.K)),
|
||||
}
|
||||
|
||||
idxs, err := database.GetIndexesFromFilter(fl)
|
||||
if chk.E(err) {
|
||||
continue
|
||||
}
|
||||
|
||||
var sers types.Uint40s
|
||||
for _, idx := range idxs {
|
||||
s, err := s.db.GetSerialsByRange(idx)
|
||||
if chk.E(err) {
|
||||
continue
|
||||
}
|
||||
sers = append(sers, s...)
|
||||
}
|
||||
|
||||
for _, ser := range sers {
|
||||
ev, err := s.db.FetchEventBySerial(ser)
|
||||
if chk.E(err) || ev == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
// Extract relay URLs from 'r' tags
|
||||
for _, v := range ev.Tags.GetAll([]byte("r")) {
|
||||
u := string(v.Value())
|
||||
n := string(normalize.URL(u))
|
||||
if n == "" {
|
||||
continue
|
||||
}
|
||||
if _, ok := seen[n]; ok {
|
||||
continue
|
||||
}
|
||||
seen[n] = struct{}{}
|
||||
urls = append(urls, n)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return urls, nil
|
||||
}
|
||||
|
||||
// queryRelayForEvents connects to a relay and queries for events from followed pubkeys
|
||||
func (s *Spider) queryRelayForEvents(
|
||||
relayURL string, followedPubkeys [][]byte, startTime, endTime time.Time,
|
||||
) (int, error) {
|
||||
log.T.F("Spider sync: querying relay %s", relayURL)
|
||||
|
||||
// Connect to the relay with a timeout context
|
||||
ctx, cancel := context.WithTimeout(s.ctx, 30*time.Second)
|
||||
defer cancel()
|
||||
|
||||
client, err := ws.RelayConnect(ctx, relayURL)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
defer client.Close()
|
||||
|
||||
// Create filter for the time range and followed pubkeys
|
||||
f := &filter.F{
|
||||
Authors: tag.NewFromBytesSlice(followedPubkeys...),
|
||||
Since: timestamp.FromUnix(startTime.Unix()),
|
||||
Until: timestamp.FromUnix(endTime.Unix()),
|
||||
Limit: func() *uint { l := uint(1000); return &l }(), // limit results to avoid overwhelming the remote relay
|
||||
}
|
||||
|
||||
// Subscribe to get events
|
||||
sub, err := client.Subscribe(ctx, filter.NewS(f))
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
defer sub.Unsub()
|
||||
|
||||
eventsCount := 0
|
||||
eventsSaved := 0
|
||||
timeout := time.After(10 * time.Second) // Timeout for receiving events
|
||||
|
||||
for {
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
log.T.F(
|
||||
"Spider sync: context done for relay %s, saved %d/%d events",
|
||||
relayURL, eventsSaved, eventsCount,
|
||||
)
|
||||
return eventsSaved, nil
|
||||
case <-timeout:
|
||||
log.T.F(
|
||||
"Spider sync: timeout for relay %s, saved %d/%d events",
|
||||
relayURL, eventsSaved, eventsCount,
|
||||
)
|
||||
return eventsSaved, nil
|
||||
case <-sub.EndOfStoredEvents:
|
||||
log.T.F(
|
||||
"Spider sync: end of stored events for relay %s, saved %d/%d events",
|
||||
relayURL, eventsSaved, eventsCount,
|
||||
)
|
||||
return eventsSaved, nil
|
||||
case ev := <-sub.Events:
|
||||
if ev == nil {
|
||||
continue
|
||||
}
|
||||
eventsCount++
|
||||
|
||||
// Verify the event signature
|
||||
if ok, err := ev.Verify(); !ok || err != nil {
|
||||
log.T.F(
|
||||
"Spider sync: invalid event signature from relay %s",
|
||||
relayURL,
|
||||
)
|
||||
ev.Free()
|
||||
continue
|
||||
}
|
||||
|
||||
// Save the event to the database
|
||||
if _, _, err := s.db.SaveEvent(s.ctx, ev); err != nil {
|
||||
if !strings.HasPrefix(err.Error(), "blocked:") {
|
||||
log.T.F(
|
||||
"Spider sync: error saving event from relay %s: %v",
|
||||
relayURL, err,
|
||||
)
|
||||
}
|
||||
// Event might already exist, which is fine for deduplication
|
||||
} else {
|
||||
eventsSaved++
|
||||
if eventsSaved%10 == 0 {
|
||||
log.T.F(
|
||||
"Spider sync: saved %d events from relay %s",
|
||||
eventsSaved, relayURL,
|
||||
)
|
||||
}
|
||||
}
|
||||
ev.Free()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Stop stops the spider functionality
|
||||
func (s *Spider) Stop() {
|
||||
log.D.Ln("Stopping spider")
|
||||
s.cancel()
|
||||
}
|
||||
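For reference, a minimal lifecycle sketch that mirrors the wiring shown in the main.go hunk earlier in this change: derive a cancellable context, construct the spider, start it, and stop it on shutdown. The package and function names are illustrative; db and cfg are assumed to be initialized the same way main() does.

package example

import (
	"context"

	"next.orly.dev/app/config"
	"next.orly.dev/pkg/database"
	"next.orly.dev/pkg/spider"
)

// runSpider wires up the spider the same way main() does. Start is a no-op
// unless cfg.SpiderMode is "follows"; Stop cancels the derived context, which
// ends the periodic scanning loop and any in-flight relay queries.
func runSpider(ctx context.Context, db *database.D, cfg *config.C) {
	spiderCtx, spiderCancel := context.WithCancel(ctx)
	s := spider.New(db, cfg, spiderCtx, spiderCancel)
	s.Start()
	defer s.Stop()
	<-ctx.Done() // block until the parent context is cancelled
}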
@@ -1 +1 @@
|
||||
v0.4.9
|
||||
v0.6.0
|
||||
97
scripts/update-embedded-web.sh
Executable file
@@ -0,0 +1,97 @@
|
||||
#!/usr/bin/env bash
|
||||
# scripts/update-embedded-web.sh
|
||||
# Build the embedded web UI and then install the Go binary.
|
||||
#
|
||||
# This script will:
|
||||
# - Build the React app in app/web to app/web/dist using Bun (preferred),
|
||||
# or fall back to npm/yarn/pnpm if Bun isn't available.
|
||||
# - Run `go install` from the repository root so the binary picks up the new
|
||||
# embedded assets.
|
||||
#
|
||||
# Usage:
|
||||
# ./scripts/update-embedded-web.sh
|
||||
#
|
||||
# Requirements:
|
||||
# - Go 1.18+ installed (for `go install` and go:embed support)
|
||||
# - Bun (https://bun.sh) recommended; alternatively Node.js with npm/yarn/pnpm
|
||||
#
|
||||
set -euo pipefail
|
||||
|
||||
# Resolve repo root to allow running from anywhere
|
||||
SCRIPT_DIR="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd)"
|
||||
REPO_ROOT="$(cd -- "${SCRIPT_DIR}/.." && pwd)"
|
||||
WEB_DIR="${REPO_ROOT}/app/web"
|
||||
|
||||
log() { printf "[update-embedded-web] %s\n" "$*"; }
|
||||
err() { printf "[update-embedded-web][ERROR] %s\n" "$*" >&2; }
|
||||
|
||||
if [[ ! -d "${WEB_DIR}" ]]; then
|
||||
err "Expected web directory at ${WEB_DIR} not found."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Choose a JS package runner
|
||||
JS_RUNNER=""
|
||||
if command -v bun >/dev/null 2>&1; then
|
||||
JS_RUNNER="bun"
|
||||
elif command -v npm >/dev/null 2>&1; then
|
||||
JS_RUNNER="npm"
|
||||
elif command -v yarn >/dev/null 2>&1; then
|
||||
JS_RUNNER="yarn"
|
||||
elif command -v pnpm >/dev/null 2>&1; then
|
||||
JS_RUNNER="pnpm"
|
||||
else
|
||||
err "No JavaScript package manager found. Install Bun (recommended) or npm/yarn/pnpm."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
log "Using JavaScript runner: ${JS_RUNNER}"
|
||||
|
||||
# Install dependencies and build the web app
|
||||
log "Installing frontend dependencies..."
|
||||
pushd "${WEB_DIR}" >/dev/null
|
||||
case "${JS_RUNNER}" in
|
||||
bun)
|
||||
bun install
|
||||
log "Building web app with Bun..."
|
||||
bun run build
|
||||
;;
|
||||
npm)
|
||||
npm ci || npm install
|
||||
log "Building web app with npm..."
|
||||
npm run build
|
||||
;;
|
||||
yarn)
|
||||
yarn install --frozen-lockfile || yarn install
|
||||
log "Building web app with yarn..."
|
||||
yarn build
|
||||
;;
|
||||
pnpm)
|
||||
pnpm install --frozen-lockfile || pnpm install
|
||||
log "Building web app with pnpm..."
|
||||
pnpm build
|
||||
;;
|
||||
*)
|
||||
err "Unsupported JS runner: ${JS_RUNNER}"
|
||||
exit 1
|
||||
;;
|
||||
|
||||
esac
|
||||
popd >/dev/null
|
||||
|
||||
# Verify the output directory expected by go:embed exists
|
||||
DIST_DIR="${WEB_DIR}/dist"
|
||||
if [[ ! -d "${DIST_DIR}" ]]; then
|
||||
err "Build did not produce ${DIST_DIR}. Check your frontend build configuration."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
log "Frontend build complete at ${DIST_DIR}."
|
||||
|
||||
# Install the Go binary so it embeds the latest files
|
||||
log "Running 'go install' from repo root..."
|
||||
pushd "${REPO_ROOT}" >/dev/null
|
||||
GO111MODULE=on go install ./...
|
||||
popd >/dev/null
|
||||
|
||||
log "Done. Your installed binary now includes the updated embedded web UI."
|
||||