[sources] Add support for compression, additional metadata, better content-type detection, and minor other changes
This commit is contained in:
parent
623ac0040d
commit
40826c8e3a
8 changed files with 358 additions and 101 deletions
|
@ -23,7 +23,17 @@ import . "../lib"
|
|||
|
||||
|
||||
|
||||
func archiveFile (_cdbWriter *cdb.Writer, _pathResolved string, _pathInArchive string, _name string, _stat os.FileInfo, _stored map[string]bool, _debug bool) (error) {
|
||||
type context struct {
|
||||
cdbWriter *cdb.Writer
|
||||
storedData map[string]bool
|
||||
compress string
|
||||
debug bool
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
func archiveFile (_context *context, _pathResolved string, _pathInArchive string, _name string, _stat os.FileInfo) (error) {
|
||||
|
||||
var _data []byte
|
||||
if _data_0, _error := ioutil.ReadFile (_pathResolved); _error == nil {
|
||||
|
@ -32,13 +42,17 @@ func archiveFile (_cdbWriter *cdb.Writer, _pathResolved string, _pathInArchive s
|
|||
return _error
|
||||
}
|
||||
|
||||
return archiveData (_cdbWriter, NamespaceFilesContent, _pathInArchive, _data, "", _stored, _debug)
|
||||
if _, _error := archiveData (_context, NamespaceFilesContent, _pathInArchive, _name, _data, ""); _error != nil {
|
||||
return _error
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
func archiveFolder (_cdbWriter *cdb.Writer, _pathResolved string, _pathInArchive string, _names []string, _stats map[string]os.FileInfo, _stored map[string]bool, _debug bool) (error) {
|
||||
func archiveFolder (_context *context, _pathResolved string, _pathInArchive string, _names []string, _stats map[string]os.FileInfo) (error) {
|
||||
|
||||
type Entry struct {
|
||||
Name string `json:"name",omitempty`
|
||||
|
@ -72,7 +86,11 @@ func archiveFolder (_cdbWriter *cdb.Writer, _pathResolved string, _pathInArchive
|
|||
}
|
||||
|
||||
if _data, _error := json.Marshal (&_folder); _error == nil {
|
||||
return archiveData (_cdbWriter, NamespaceFoldersMetadata, _pathInArchive, _data, "application/json; charset=utf-8", _stored, _debug)
|
||||
if _, _error := archiveData (_context, NamespaceFoldersEntries, _pathInArchive, "", _data, MimeTypeJson); _error != nil {
|
||||
return _error
|
||||
}
|
||||
} else {
|
||||
return _error
|
||||
}
|
||||
|
||||
return nil
|
||||
|
@ -81,60 +99,89 @@ func archiveFolder (_cdbWriter *cdb.Writer, _pathResolved string, _pathInArchive
|
|||
|
||||
|
||||
|
||||
func archiveData (_cdbWriter *cdb.Writer, _namespace string, _pathInArchive string, _data []byte, _dataType string, _stored map[string]bool, _debug bool) (error) {
|
||||
func archiveData (_context *context, _namespace string, _pathInArchive string, _name string, _data []byte, _dataType string) (string, error) {
|
||||
|
||||
_fingerprintRaw := sha256.Sum256 (_data)
|
||||
_fingerprint := hex.EncodeToString (_fingerprintRaw[:])
|
||||
|
||||
_wasStored, _ := _stored[_fingerprint]
|
||||
_wasStored, _ := _context.storedData[_fingerprint]
|
||||
|
||||
if ! _wasStored {
|
||||
_key := fmt.Sprintf ("%s:%s", NamespaceDataContent, _fingerprint)
|
||||
if _debug {
|
||||
log.Printf ("[ ] ++ %s", _key)
|
||||
}
|
||||
if _error := _cdbWriter.Put ([]byte (_key), _data); _error != nil {
|
||||
return _error
|
||||
if (_dataType == "") && (_name != "") {
|
||||
_extension := filepath.Ext (_pathInArchive)
|
||||
if _extension != "" {
|
||||
_extension = _extension[1:]
|
||||
}
|
||||
_dataType, _ = MimeTypesByExtension[_extension]
|
||||
}
|
||||
if _dataType == "" {
|
||||
_dataType = http.DetectContentType (_data)
|
||||
}
|
||||
if _dataType == "" {
|
||||
_dataType = MimeTypeRaw
|
||||
}
|
||||
|
||||
if ! _wasStored {
|
||||
_key := fmt.Sprintf ("%s:%s", NamespaceDataContentType, _fingerprint)
|
||||
if _dataType == "" {
|
||||
_dataType = http.DetectContentType (_data)
|
||||
|
||||
var _dataEncoding string
|
||||
if _data_0, _dataEncoding_0, _error := Compress (_data, _context.compress); _error == nil {
|
||||
_data = _data_0
|
||||
_dataEncoding = _dataEncoding_0
|
||||
}
|
||||
if _debug {
|
||||
log.Printf ("[ ] ++ %s %s", _key, _dataType)
|
||||
|
||||
_metadata := make (map[string]string, 16)
|
||||
_metadata["content-type"] = _dataType
|
||||
_metadata["content-encoding"] = _dataEncoding
|
||||
_metadata["etag"] = _fingerprint
|
||||
|
||||
var _metadataRaw []byte
|
||||
if _metadataRaw_0, _error := MetadataEncode (_metadata); _error == nil {
|
||||
_metadataRaw = _metadataRaw_0
|
||||
} else {
|
||||
return "", _error
|
||||
}
|
||||
if _error := _cdbWriter.Put ([]byte (_key), []byte (_dataType)); _error != nil {
|
||||
return _error
|
||||
|
||||
{
|
||||
_key := fmt.Sprintf ("%s:%s", NamespaceDataContent, _fingerprint)
|
||||
if _context.debug {
|
||||
log.Printf ("[ ] ++ %s", _key)
|
||||
}
|
||||
if _error := _context.cdbWriter.Put ([]byte (_key), _data); _error != nil {
|
||||
return "", _error
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
_key := fmt.Sprintf ("%s:%s", NamespaceDataMetadata, _fingerprint)
|
||||
if _context.debug {
|
||||
log.Printf ("[ ] ++ %s", _key)
|
||||
}
|
||||
if _error := _context.cdbWriter.Put ([]byte (_key), _metadataRaw); _error != nil {
|
||||
return "", _error
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if _namespace != "" {
|
||||
_key := fmt.Sprintf ("%s:%s", _namespace, _pathInArchive)
|
||||
if _debug {
|
||||
log.Printf ("[ ] ++ %s", _key)
|
||||
if _context.debug {
|
||||
log.Printf ("[ ] ++ %s %s", _key, _fingerprint)
|
||||
}
|
||||
if _error := _cdbWriter.Put ([]byte (_key), []byte (_fingerprint)); _error != nil {
|
||||
return _error
|
||||
if _error := _context.cdbWriter.Put ([]byte (_key), []byte (_fingerprint)); _error != nil {
|
||||
return "", _error
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
return _dataType, nil
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
func walkPath (_cdbWriter *cdb.Writer, _path string, _prefix string, _name string, _recursed map[string]uint, _stored map[string]bool, _debug bool) (error) {
|
||||
func walkPath (_context *context, _path string, _prefix string, _name string, _recursed map[string]uint) (error) {
|
||||
|
||||
if _recursed == nil {
|
||||
_recursed = make (map[string]uint, 128)
|
||||
}
|
||||
if _stored == nil {
|
||||
_stored = make (map[string]bool, 16 * 1024)
|
||||
}
|
||||
|
||||
_pathInArchive := filepath.Join (_prefix, _name)
|
||||
|
||||
|
@ -166,16 +213,16 @@ func walkPath (_cdbWriter *cdb.Writer, _path string, _prefix string, _name strin
|
|||
_pathResolved = _path
|
||||
}
|
||||
|
||||
if _isSymlink && _debug {
|
||||
if _isSymlink && _context.debug {
|
||||
log.Printf ("[ ] ~~ %s -> %s\n", _pathInArchive, _pathResolved)
|
||||
}
|
||||
|
||||
if _stat.Mode () .IsRegular () {
|
||||
|
||||
if _debug {
|
||||
if _context.debug {
|
||||
log.Printf ("[ ] ## %s\n", _pathInArchive)
|
||||
}
|
||||
return archiveFile (_cdbWriter, _pathResolved, _pathInArchive, _name, _stat, _stored, _debug)
|
||||
return archiveFile (_context, _pathResolved, _pathInArchive, _name, _stat)
|
||||
|
||||
} else if _stat.Mode () .IsDir () {
|
||||
|
||||
|
@ -186,7 +233,7 @@ func walkPath (_cdbWriter *cdb.Writer, _path string, _prefix string, _name strin
|
|||
}
|
||||
_recursed[_pathResolved] = _wasRecursed + 1
|
||||
|
||||
if _debug {
|
||||
if _context.debug {
|
||||
log.Printf ("[ ] >> %s\n", _pathInArchive)
|
||||
}
|
||||
|
||||
|
@ -203,7 +250,7 @@ func walkPath (_cdbWriter *cdb.Writer, _path string, _prefix string, _name strin
|
|||
_name := _stat.Name ()
|
||||
_names = append (_names, _name)
|
||||
_stats[_name] = _stat
|
||||
if _error := walkPath (_cdbWriter, filepath.Join (_path, _name), _prefix, _name, _recursed, _stored, _debug); _error != nil {
|
||||
if _error := walkPath (_context, filepath.Join (_path, _name), _prefix, _name, _recursed); _error != nil {
|
||||
return _error
|
||||
}
|
||||
}
|
||||
|
@ -217,13 +264,13 @@ func walkPath (_cdbWriter *cdb.Writer, _path string, _prefix string, _name strin
|
|||
|
||||
sort.Strings (_names)
|
||||
|
||||
if _debug {
|
||||
if _context.debug {
|
||||
log.Printf ("[ ] << %s\n", _pathInArchive)
|
||||
}
|
||||
|
||||
if _debug {
|
||||
if _context.debug {
|
||||
log.Printf ("[ ] <> %s\n", _pathInArchive)
|
||||
if _error := archiveFolder (_cdbWriter, _pathResolved, _pathInArchive, _names, _stats, _stored, _debug); _error != nil {
|
||||
if _error := archiveFolder (_context, _pathResolved, _pathInArchive, _names, _stats); _error != nil {
|
||||
return _error
|
||||
}
|
||||
}
|
||||
|
@ -249,6 +296,7 @@ func main_0 () (error) {
|
|||
|
||||
var _sourcesFolder string
|
||||
var _archiveFile string
|
||||
var _compress string
|
||||
var _debug bool
|
||||
|
||||
{
|
||||
|
@ -256,12 +304,14 @@ func main_0 () (error) {
|
|||
|
||||
_sourcesFolder_0 := _flags.String ("sources", "", "<path>")
|
||||
_archiveFile_0 := _flags.String ("archive", "", "<path>")
|
||||
_compress_0 := _flags.String ("compress", "", "gzip | brotli")
|
||||
_debug_0 := _flags.Bool ("debug", false, "")
|
||||
|
||||
FlagsParse (_flags, 0, 0)
|
||||
|
||||
_sourcesFolder = *_sourcesFolder_0
|
||||
_archiveFile = *_archiveFile_0
|
||||
_compress = *_compress_0
|
||||
_debug = *_debug_0
|
||||
|
||||
if _sourcesFolder == "" {
|
||||
|
@ -280,7 +330,14 @@ func main_0 () (error) {
|
|||
AbortError (_error, "[85234ba0] failed creating archive (while opening)!")
|
||||
}
|
||||
|
||||
if _error := walkPath (_cdbWriter, _sourcesFolder, "/", "", nil, nil, _debug); _error != nil {
|
||||
_context := & context {
|
||||
cdbWriter : _cdbWriter,
|
||||
storedData : make (map[string]bool, 16 * 1024),
|
||||
compress : _compress,
|
||||
debug : _debug,
|
||||
}
|
||||
|
||||
if _error := walkPath (_context, _sourcesFolder, "/", "", nil); _error != nil {
|
||||
AbortError (_error, "[b6a19ef4] failed walking folder!")
|
||||
}
|
||||
|
||||
|
|
|
@ -29,15 +29,15 @@ func (_server *server) ServeHTTP (_response http.ResponseWriter, _request *http.
|
|||
|
||||
_responseHeaders := _response.Header ()
|
||||
|
||||
_responseHeaders.Set ("Content-Security-Policy", "upgrade-insecure-requests")
|
||||
// _responseHeaders.Set ("Content-Security-Policy", "upgrade-insecure-requests")
|
||||
_responseHeaders.Set ("Referrer-Policy", "strict-origin-when-cross-origin")
|
||||
_responseHeaders.Set ("X-Frame-Options", "SAMEORIGIN")
|
||||
_responseHeaders.Set ("X-Content-Type-Options", "nosniff")
|
||||
_responseHeaders.Set ("X-content-type-Options", "nosniff")
|
||||
_responseHeaders.Set ("X-XSS-Protection", "1; mode=block")
|
||||
|
||||
_responseHeaders.Set ("Date", _timestampHttp)
|
||||
_responseHeaders.Set ("Last-Modified", _timestampHttp)
|
||||
_responseHeaders.Set ("Age", "0")
|
||||
_responseHeaders.Set ("date", _timestampHttp)
|
||||
_responseHeaders.Set ("last-modified", _timestampHttp)
|
||||
_responseHeaders.Set ("age", "0")
|
||||
|
||||
_method := _request.Method
|
||||
_path := _request.URL.Path
|
||||
|
@ -54,12 +54,12 @@ func (_server *server) ServeHTTP (_response http.ResponseWriter, _request *http.
|
|||
if (_path != "/") && (_path[len (_path) - 1] == '/') {
|
||||
_path_0 = _path[: len (_path) - 1]
|
||||
}
|
||||
for _, _namespace := range []string {NamespaceFilesContent, NamespaceFoldersContent, NamespaceFoldersMetadata} {
|
||||
for _, _namespace := range []string {NamespaceFilesContent, NamespaceFoldersContent, NamespaceFoldersEntries} {
|
||||
_key := fmt.Sprintf ("%s:%s", _namespace, _path_0)
|
||||
if _value, _error := _server.cdbReader.Get ([]byte (_key)); _error == nil {
|
||||
if _value != nil {
|
||||
_fingerprint = string (_value)
|
||||
if ((_namespace == NamespaceFoldersContent) || (_namespace == NamespaceFoldersMetadata)) && (_path == _path_0) && (_path != "/") {
|
||||
if ((_namespace == NamespaceFoldersContent) || (_namespace == NamespaceFoldersEntries)) && (_path == _path_0) && (_path != "/") {
|
||||
_server.ServeRedirect (_response, http.StatusTemporaryRedirect, _path + "/")
|
||||
return
|
||||
}
|
||||
|
@ -77,9 +77,10 @@ func (_server *server) ServeHTTP (_response http.ResponseWriter, _request *http.
|
|||
_server.ServeError (_response, http.StatusNotFound, nil)
|
||||
} else {
|
||||
_data, _dataContentType := FaviconData ()
|
||||
_responseHeaders.Set ("Content-Type", _dataContentType)
|
||||
_responseHeaders.Set ("Cache-Control", "public, immutable, max-age=3600")
|
||||
_responseHeaders.Set ("ETag", "f00f5f99bb3d45ef9806547fe5fe031a")
|
||||
_responseHeaders.Set ("content-type", _dataContentType)
|
||||
_responseHeaders.Set ("content-encoding", "identity")
|
||||
_responseHeaders.Set ("etag", "f00f5f99bb3d45ef9806547fe5fe031a")
|
||||
_responseHeaders.Set ("cache-control", "public, immutable, max-age=3600")
|
||||
_response.WriteHeader (http.StatusOK)
|
||||
_response.Write (_data)
|
||||
}
|
||||
|
@ -92,22 +93,30 @@ func (_server *server) ServeHTTP (_response http.ResponseWriter, _request *http.
|
|||
if _value, _error := _server.cdbReader.Get ([]byte (_key)); _error == nil {
|
||||
if _value != nil {
|
||||
_data = _value
|
||||
} else {
|
||||
_server.ServeError (_response, http.StatusInternalServerError, fmt.Errorf ("[0165c193] missing data content: `%s`", _fingerprint))
|
||||
return
|
||||
}
|
||||
} else {
|
||||
_server.ServeError (_response, http.StatusInternalServerError, _error)
|
||||
return
|
||||
}
|
||||
}
|
||||
if _data == nil {
|
||||
_server.ServeError (_response, http.StatusNotFound, nil)
|
||||
return
|
||||
}
|
||||
|
||||
var _metadata [][2]string
|
||||
{
|
||||
_key := fmt.Sprintf ("%s:%s", NamespaceDataContentType, _fingerprint)
|
||||
_key := fmt.Sprintf ("%s:%s", NamespaceDataMetadata, _fingerprint)
|
||||
if _value, _error := _server.cdbReader.Get ([]byte (_key)); _error == nil {
|
||||
if _value != nil {
|
||||
_responseHeaders.Set ("Content-Type", string (_value))
|
||||
if _metadata_0, _error := MetadataDecode (_value); _error == nil {
|
||||
_metadata = _metadata_0
|
||||
} else {
|
||||
_server.ServeError (_response, http.StatusInternalServerError, _error)
|
||||
return
|
||||
}
|
||||
} else {
|
||||
_server.ServeError (_response, http.StatusInternalServerError, fmt.Errorf ("[e8702411] missing data metadata: `%s`", _fingerprint))
|
||||
return
|
||||
}
|
||||
} else {
|
||||
_server.ServeError (_response, http.StatusInternalServerError, _error)
|
||||
|
@ -119,8 +128,11 @@ func (_server *server) ServeHTTP (_response http.ResponseWriter, _request *http.
|
|||
log.Printf ("[dd] [b15f3cad] serving for `%s`...\n", _path)
|
||||
}
|
||||
|
||||
_responseHeaders.Set ("Cache-Control", "public, immutable, max-age=3600")
|
||||
_responseHeaders.Set ("ETag", _fingerprint)
|
||||
for _, _metadata := range _metadata {
|
||||
_responseHeaders.Set (_metadata[0], _metadata[1])
|
||||
}
|
||||
_responseHeaders.Set ("cache-control", "public, immutable, max-age=3600")
|
||||
|
||||
_response.WriteHeader (http.StatusOK)
|
||||
_response.Write (_data)
|
||||
}
|
||||
|
@ -139,19 +151,27 @@ func (_server *server) ServeRedirect (_response http.ResponseWriter, _status uin
|
|||
return
|
||||
}
|
||||
|
||||
_responseHeaders.Set ("Content-Type", "text/plain; charset=utf-8")
|
||||
_responseHeaders.Set ("Cache-Control", "public, immutable, max-age=3600")
|
||||
_responseHeaders.Set ("ETag", "7aa652d8d607b85808c87c1c2105fbb5")
|
||||
_responseHeaders.Set ("Location", _url)
|
||||
_responseHeaders.Set ("content-type", MimeTypeText)
|
||||
_responseHeaders.Set ("content-encoding", "identity")
|
||||
_responseHeaders.Set ("etag", "7aa652d8d607b85808c87c1c2105fbb5")
|
||||
_responseHeaders.Set ("cache-control", "public, immutable, max-age=3600")
|
||||
_responseHeaders.Set ("location", _url)
|
||||
|
||||
_response.WriteHeader (int (_status))
|
||||
_response.Write ([]byte (fmt.Sprintf ("[%d] %s", _status, _url)))
|
||||
}
|
||||
|
||||
|
||||
func (_server *server) ServeError (_response http.ResponseWriter, _status uint, _error error) () {
|
||||
_response.Header () .Set ("Content-Type", "text/plain; charset=utf-8")
|
||||
_responseHeaders := _response.Header ()
|
||||
|
||||
_responseHeaders.Set ("content-type", MimeTypeText)
|
||||
_responseHeaders.Set ("content-encoding", "identity")
|
||||
_responseHeaders.Set ("cache-control", "no-cache")
|
||||
|
||||
_response.WriteHeader (int (_status))
|
||||
_response.Write ([]byte (fmt.Sprintf ("[%d]", _status)))
|
||||
|
||||
LogError (_error, "")
|
||||
}
|
||||
|
||||
|
@ -205,6 +225,10 @@ func main_0 () (error) {
|
|||
debug : _debug,
|
||||
}
|
||||
|
||||
if _debug {
|
||||
log.Printf ("[ii] [f11e4e37] listening on `http://%s/`", _bind)
|
||||
}
|
||||
|
||||
if _error := http.ListenAndServe (_bind, _server); _error != nil {
|
||||
AbortError (_error, "[44f45c67] failed starting server!")
|
||||
}
|
||||
|
|
70
sources/lib/compress.go
Normal file
70
sources/lib/compress.go
Normal file
|
@ -0,0 +1,70 @@
|
|||
|
||||
|
||||
package lib
|
||||
|
||||
|
||||
import "bytes"
|
||||
import "compress/gzip"
|
||||
import "fmt"
|
||||
|
||||
import "github.com/google/brotli/go/cbrotli"
|
||||
|
||||
|
||||
|
||||
|
||||
func Compress (_data []byte, _algorithm string) ([]byte, string, error) {
|
||||
switch _algorithm {
|
||||
case "gz", "gzip" :
|
||||
return CompressGzip (_data)
|
||||
case "br", "brotli" :
|
||||
return CompressBrotli (_data)
|
||||
case "", "none", "identity" :
|
||||
return _data, "identity", nil
|
||||
default :
|
||||
return nil, "", fmt.Errorf ("[ea23f966] invalid compression algorithm `%s`", _algorithm)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
func CompressGzip (_data []byte) ([]byte, string, error) {
|
||||
|
||||
_buffer := & bytes.Buffer {}
|
||||
|
||||
var _encoder *gzip.Writer
|
||||
if _encoder_0, _error := gzip.NewWriterLevel (_buffer, gzip.BestCompression); _error == nil {
|
||||
_encoder = _encoder_0
|
||||
} else {
|
||||
return nil, "", _error
|
||||
}
|
||||
|
||||
if _, _error := _encoder.Write (_data); _error != nil {
|
||||
return nil, "", _error
|
||||
}
|
||||
if _error := _encoder.Close (); _error != nil {
|
||||
return nil, "", _error
|
||||
}
|
||||
|
||||
_data = _buffer.Bytes ()
|
||||
return _data, "gzip", nil
|
||||
}
|
||||
|
||||
|
||||
func CompressBrotli (_data []byte) ([]byte, string, error) {
|
||||
|
||||
_buffer := & bytes.Buffer {}
|
||||
|
||||
_encoder := cbrotli.NewWriter (_buffer, cbrotli.WriterOptions { Quality : 11, LGWin : 24})
|
||||
|
||||
if _, _error := _encoder.Write (_data); _error != nil {
|
||||
return nil, "", _error
|
||||
}
|
||||
if _error := _encoder.Close (); _error != nil {
|
||||
return nil, "", _error
|
||||
}
|
||||
|
||||
_data = _buffer.Bytes ()
|
||||
return _data, "br", nil
|
||||
}
|
||||
|
46
sources/lib/errors.go
Normal file
46
sources/lib/errors.go
Normal file
|
@ -0,0 +1,46 @@
|
|||
|
||||
|
||||
package lib
|
||||
|
||||
|
||||
import "log"
|
||||
import "os"
|
||||
import "regexp"
|
||||
|
||||
|
||||
|
||||
|
||||
func AbortError (_error error, _message string) () {
|
||||
LogError (_error, _message)
|
||||
log.Printf ("[!!] [89251d36] aborting!\n")
|
||||
os.Exit (1)
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
func LogError (_error error, _message string) () {
|
||||
|
||||
if _message != "#" {
|
||||
if (_message == "") && (_error != nil) {
|
||||
_message = "[70d7e7c6] unexpected error encountered!";
|
||||
}
|
||||
if _message != "" {
|
||||
log.Printf ("[ee] %s\n", _message)
|
||||
}
|
||||
}
|
||||
|
||||
if _error != nil {
|
||||
_errorString := _error.Error ()
|
||||
if logErrorMessageProper.MatchString (_errorString) {
|
||||
log.Printf ("[ee] %s\n", _errorString)
|
||||
} else {
|
||||
log.Printf ("[ee] [c776ae31] %q\n", _errorString)
|
||||
log.Printf ("[ee] [ddd6baae] %#v\n", _error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
var logErrorMessageProper *regexp.Regexp = regexp.MustCompile (`\A\[[0-9a-f]{8}\] [^\n]+\z`)
|
||||
|
|
@ -7,7 +7,6 @@ import "flag"
|
|||
import "fmt"
|
||||
import "log"
|
||||
import "os"
|
||||
import "regexp"
|
||||
|
||||
|
||||
|
||||
|
@ -51,40 +50,3 @@ func FlagsParse (_flags *flag.FlagSet, _argumentsMin uint, _argumentsMax uint) (
|
|||
return _flags.Args ()
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
func LogError (_error error, _message string) () {
|
||||
|
||||
if _message != "#" {
|
||||
if (_message == "") && (_error != nil) {
|
||||
_message = "[70d7e7c6] unexpected error encountered!";
|
||||
}
|
||||
if _message != "" {
|
||||
log.Printf ("[ee] %s\n", _message)
|
||||
}
|
||||
}
|
||||
|
||||
if _error != nil {
|
||||
_errorString := _error.Error ()
|
||||
if _matches, _matchesError := regexp.MatchString (`^\[[0-9a-f]{8}\] [^\n]+$`, _errorString); _matchesError == nil {
|
||||
if _matches {
|
||||
log.Printf ("[ee] %s\n", _errorString)
|
||||
} else {
|
||||
log.Printf ("[ee] [c776ae31] %q\n", _errorString)
|
||||
log.Printf ("[ee] [ddd6baae] %#v\n", _error)
|
||||
}
|
||||
} else {
|
||||
log.Printf ("[ee] [609a0410] %q\n", _errorString)
|
||||
log.Printf ("[ee] [2ddce4bf] %#v\n", _error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
func AbortError (_error error, _message string) () {
|
||||
LogError (_error, _message)
|
||||
log.Printf ("[!!] [89251d36] aborting!\n")
|
||||
os.Exit (1)
|
||||
}
|
||||
|
58
sources/lib/metadata.go
Normal file
58
sources/lib/metadata.go
Normal file
|
@ -0,0 +1,58 @@
|
|||
|
||||
|
||||
package lib
|
||||
|
||||
|
||||
import "bytes"
|
||||
import "fmt"
|
||||
import "regexp"
|
||||
|
||||
|
||||
|
||||
|
||||
// MetadataEncode serializes _metadata as newline-terminated `key : value`
// records, validating each key and value against the conventions below.
//
// The keys are emitted in sorted order so the encoding is deterministic:
// the original map-range iteration produced a different byte stream on
// every run, which made archive contents unstable between builds.
//
// Entries with an empty value are silently dropped.  An invalid key or
// value aborts the encoding with a tagged error.
func MetadataEncode (_metadata map[string]string) ([]byte, error) {

	_buffer := & bytes.Buffer {}

	// collect and sort the keys for a reproducible output order
	_keys := make ([]string, 0, len (_metadata))
	for _key := range _metadata {
		_keys = append (_keys, _key)
	}
	sort.Strings (_keys)

	for _, _key := range _keys {
		_value := _metadata[_key]
		if ! metadataKeyRegex.MatchString (_key) {
			return nil, fmt.Errorf ("[2f761e02] invalid metadata key: `%s`", _key)
		}
		if _value == "" {
			// empty values mean "not present";  skip the record entirely
			continue
		}
		if ! metadataValueRegex.MatchString (_value) {
			return nil, fmt.Errorf ("[e8faf5bd] invalid metadata value: `%s`", _value)
		}
		_buffer.WriteString (_key)
		_buffer.WriteString (" : ")
		_buffer.WriteString (_value)
		_buffer.WriteString ("\n")
	}

	return _buffer.Bytes (), nil
}


// MetadataDecode parses the `key : value` records produced by
// MetadataEncode back into an ordered list of pairs.  Empty lines are
// ignored;  any non-empty line without a ` : ` separator is an error.
func MetadataDecode (_data []byte) ([][2]string, error) {
	_metadata := make ([][2]string, 0, 16)
	for _, _line := range bytes.Split (_data, []byte ("\n")) {
		if len (_line) == 0 {
			continue
		}
		// split only on the first separator so values may contain " : "
		_record := bytes.SplitN (_line, []byte (" : "), 2)
		if len (_record) != 2 {
			return nil, fmt.Errorf ("[7cb30bf7] invalid metadata encoding")
		}
		_metadata = append (_metadata, [2]string { string (_record[0]), string (_record[1]) })
	}
	return _metadata, nil
}


// metadataKeyRegex accepts lowercase header-style keys such as
// `content-type` or `etag` (no leading/trailing/doubled hyphens).
var metadataKeyRegex *regexp.Regexp = regexp.MustCompile (`\A[a-z0-9](?:[a-z0-9-]?[a-z]+)*\z`)
// metadataValueRegex accepts single-line printable values with single
// internal spaces (no leading/trailing/doubled spaces, no control characters).
var metadataValueRegex *regexp.Regexp = regexp.MustCompile (`\A[[:graph:]](?: ?[[:graph:]]+)*\z`)
|
||||
|
40
sources/lib/mime.go
Normal file
40
sources/lib/mime.go
Normal file
|
@ -0,0 +1,40 @@
|
|||
|
||||
|
||||
package lib
|
||||
|
||||
|
||||
|
||||
|
||||
// Canonical MIME type strings used when archiving and serving content.
// Textual types carry an explicit UTF-8 charset.
const MimeTypeText = "text/plain; charset=utf-8"
const MimeTypeCsv = "text/csv; charset=utf-8"

const MimeTypeHtml = "text/html; charset=utf-8"
const MimeTypeCss = "text/css; charset=utf-8"
const MimeTypeJs = "application/javascript; charset=utf-8"
const MimeTypeJson = "application/json; charset=utf-8"

const MimeTypeXml = "application/xml; charset=utf-8"
const MimeTypeXslt = "application/xslt+xml; charset=utf-8"
const MimeTypeXhtml = "application/xhtml+xml; charset=utf-8"


// MimeTypeRaw is the fallback for content whose type cannot be determined.
const MimeTypeRaw = "application/octet-stream"


// MimeTypesByExtension maps lowercase file extensions (without the dot)
// to their canonical MIME type;  extensions not listed here fall back to
// content sniffing by the caller.
var MimeTypesByExtension map[string]string = map[string]string {

	"txt" : MimeTypeText,
	"csv" : MimeTypeCsv,
	"tsv" : MimeTypeCsv,

	"html" : MimeTypeHtml,
	"css" : MimeTypeCss,
	"js" : MimeTypeJs,
	"json" : MimeTypeJson,

	"xml" : MimeTypeXml,
	// BUGFIX:  `xslt` previously mapped to MimeTypeXml, leaving the
	// declared MimeTypeXslt constant unused;  map it to its own type.
	"xslt" : MimeTypeXslt,
	"xhtml" : MimeTypeXhtml,

}
|
||||
|
|
@ -7,8 +7,8 @@ package lib
|
|||
|
||||
const NamespaceFilesContent = "files:content"
|
||||
const NamespaceFoldersContent = "folders:content"
|
||||
const NamespaceFoldersMetadata = "folders:metadata"
|
||||
const NamespaceFoldersEntries = "folders:entries"
|
||||
|
||||
const NamespaceDataContent = "data:content"
|
||||
const NamespaceDataContentType = "data:content-type"
|
||||
const NamespaceDataMetadata = "data:metadata"
|
||||
|
||||
|
|
Loading…
Reference in a new issue