dance to chunk the message into 64KB pieces...
parent 57c00f4f6e
commit 34cc5f851b
4 changed files with 125 additions and 29 deletions
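The change spans both sides of the native-messaging bridge: app/c.js gathers the edited page (front matter fields, abstract, markdown body) into one JSON message; app/main.js fingerprints that message with cyrb53 and, when its serialized length exceeds 61440 bytes, splits hugocontent into fragments that are queued per digest and posted one at a time; and the Go native host (the hunks at the bottom) appends each fragment to a temp file keyed by the digest, asks for the next one, and reassembles the full content before committing. The host's read buffer also drops from 8192 * 32 to 8192 * 8 bytes (64 KB), which is presumably why messages now have to be chunked. Below is a sketch of the envelope the two sides exchange, reconstructed from the field names visible in the hunks; all values are made up for illustration.

    // Sketch of the message posted to the native host; key names follow the
    // diff below, the values here are invented.
    const envelope = {
        hugofrontmatter: JSON.stringify({ title: "Example", tags: ["hugo"] }), // front matter as a JSON string
        hugocontent: "# Example\n\nA possibly very long markdown body...",     // the only field that gets fragmented
        filepath: "content/example.md",
        publish: true,        // "publish" checkbox state
        offline: false,       // "offline" checkbox state
        protocol: "file",     // document.location.protocol.substr(0, 4): "http" or "file"
        multipart: false,     // set by app/main.js: true when hugocontent was split
        multipartid: "4704201634395389",  // cyrb53 digest of the serialized message, as a string (made up here)
        multipartnext: false  // true while further fragments will follow
    };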
app/c.js (20 changes)

@@ -6,7 +6,16 @@ function captureFrontMatterAndMarkdown() {
if(key.endsWith('[]')){
let kie = key.slice(0, -2);
(kie in formDataJSON) || (formDataJSON[kie] = [])
console.log("[]:", kie, value)
formDataJSON[kie].push(value);
} else if (key.endsWith('[j]')) {
let kie = key.slice(0, -3);
(kie in formDataJSON) || (formDataJSON[kie] = [])
let vA = JSON.parse(value)
console.log("[j]:", kie, value, vA)
if (Array.isArray(vA)) {
vA.forEach(v => formDataJSON[kie].push(v))
}
} else if (['draft', 'iscjklanguage'].includes(key) && value!="true") {
return
} else if (key == "relpath"){

@@ -20,11 +29,15 @@ function captureFrontMatterAndMarkdown() {
formDataJSON[key] = value
}
});

const content = document.querySelector('textarea').value

let abstract = document.getElementById('pageabstract')
if (abstract) {
formDataJSON['abstract'] = abstract.value
}
const content = document.getElementById('pagecontent').value
let publish = formData.get("publish") == "on" ? true : false
let offline = formData.get("offline") == "on" ? true : false
return {
let ret = {
"hugofrontmatter": JSON.stringify(formDataJSON),
"hugocontent": content,
"filepath": filepath,

@@ -34,6 +47,7 @@ function captureFrontMatterAndMarkdown() {
"offline": offline,
"protocol": document.location.protocol.substr(0,4)
}
return ret
}

let sb = document.getElementById('sandpointsButton')
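The suffix convention in the first hunk is what lets repeated and JSON-encoded form fields end up as arrays in the front matter: a key ending in [] pushes each scalar value into an array, while a key ending in [j] is parsed as JSON and its elements are merged in. A self-contained sketch of that convention, using a plain Map instead of the page's FormData and omitting the draft/iscjklanguage and relpath special cases; the field names in the example are made up.

    // Minimal restatement of the "[]" / "[j]" suffix handling from app/c.js.
    function collectFrontMatter(fields) {
        const formDataJSON = {};
        fields.forEach((value, key) => {
            if (key.endsWith('[]')) {                    // repeated scalar field -> array entry
                const k = key.slice(0, -2);
                (k in formDataJSON) || (formDataJSON[k] = []);
                formDataJSON[k].push(value);
            } else if (key.endsWith('[j]')) {            // JSON-encoded array -> merge its elements
                const k = key.slice(0, -3);
                (k in formDataJSON) || (formDataJSON[k] = []);
                const parsed = JSON.parse(value);
                if (Array.isArray(parsed)) parsed.forEach(v => formDataJSON[k].push(v));
            } else {
                formDataJSON[key] = value;               // plain scalar field
            }
        });
        return formDataJSON;
    }

    // Example input: one array-style field, one JSON-encoded field, one scalar.
    const fields = new Map([
        ["tags[]", "hugo"],
        ["authors[j]", JSON.stringify(["ana", "bojan"])],
        ["title", "Example page"],
    ]);
    console.log(collectFrontMatter(fields));
    // { tags: ["hugo"], authors: ["ana", "bojan"], title: "Example page" }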
app/main.js (61 changes)
@@ -1,3 +1,19 @@
var messageQueue = {};

// https://stackoverflow.com/a/52171480
// simple 53-bit hash by https://github.com/bryc
const cyrb53 = function(str, seed = 0) {
let h1 = 0xdeadbeef ^ seed, h2 = 0x41c6ce57 ^ seed;
for (let i = 0, ch; i < str.length; i++) {
ch = str.charCodeAt(i);
h1 = Math.imul(h1 ^ ch, 2654435761);
h2 = Math.imul(h2 ^ ch, 1597334677);
}
h1 = Math.imul(h1 ^ (h1>>>16), 2246822507) ^ Math.imul(h2 ^ (h2>>>13), 3266489909);
h2 = Math.imul(h2 ^ (h2>>>16), 2246822507) ^ Math.imul(h1 ^ (h1>>>13), 3266489909);
return 4294967296 * (2097151 & h2) + (h1>>>0);
};

chrome.runtime.onMessage.addListener((m, n)=> {
var nh = chrome.runtime.connectNative("org.sandpoints.chromeext")
nh.onDisconnect.addListener((e, err)=> {
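cyrb53 serves only as a cheap, deterministic fingerprint here: hashing the serialized message gives every fragment of that message the same multipartid, which the extension uses to key messageQueue and the native host uses to name the temp file. A short usage sketch with a made-up message:

    // Deterministic: the same serialized message always yields the same digest,
    // so all fragments of one message share a multipartid. The message is invented.
    const m = { hugocontent: "# A long markdown body...", filepath: "content/example.md" };
    const digestHex = cyrb53(JSON.stringify(m)).toString();
    console.log(digestHex, digestHex === cyrb53(JSON.stringify(m)).toString()); // <digest> true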
@@ -6,18 +22,55 @@ chrome.runtime.onMessage.addListener((m, n)=> {
})
console.log("message:", m)
console.log("sender:", n)
nh.postMessage(m);
console.log("message length:", JSON.stringify(m).length)
const messageLength = JSON.stringify(m).length
const digestHex = cyrb53(JSON.stringify(m)).toString();
if (messageLength > 61440) {
const contentLength = m.hugocontent.length
const fragmentLength = 61440 - (messageLength - JSON.stringify(m.hugocontent).length);
let hugocontent = ("_" + m.hugocontent).slice(1); // deep copy
messageQueue[digestHex] = [];
let x = 0;
while ((x+1)*fragmentLength < contentLength+fragmentLength) {
mm = {...m};
mm.multipart = true;
mm.multipartid = digestHex;
mm.multipartnext = true;
mm.hugocontent = hugocontent.slice(x*fragmentLength, (x+1)*fragmentLength == contentLength ? contentLength : Math.min(contentLength, (x+1)*fragmentLength))
console.log(x*fragmentLength, (x+1)*fragmentLength, "hugocontent(start):", m.hugocontent.slice(0, 32), "hugocontent(end):", m.hugocontent.slice(m.hugocontent.length - 32))
messageQueue[digestHex].push(mm)
x++;
}
nh.postMessage(messageQueue[digestHex].shift())
} else {
console.log("messageLength less than 61440:", messageLength)
m["multipart"] = false;
m["multipartnext"] = false;
m["multipartid"] = digestHex;
nh.postMessage(m);
}

nh.onMessage.addListener((m, n)=> {
console.log("native host message:", m)
console.log("native host sender:", n)
if (m.Response != "false") {
console.log("native host sender:", n)
if (m.action == "redirect") {
chrome.tabs.query({ active: true, currentWindow: true }, (tabs) => {
chrome.tabs.sendMessage(tabs[0].id, { msg: m.response }, (response) => {
chrome.tabs.sendMessage(tabs[0].id, { msg: m.actionid }, (response) => {
if (response) {
console.log(response)
}
});
})
console.log(m.actionid)
} else if (m.action == "nextMessage") {
if (messageQueue[m.actionid].length == 1) {
let mm = messageQueue[m.actionid][0];
mm.multipartnext = false;
messageQueue[m.actionid] = [];
nh.postMessage(mm);
} else {
nh.postMessage(messageQueue[m.actionid].shift())
}
}
})
})
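The fragment size is whatever remains of the 61440-byte ceiling after subtracting the non-content part of the serialized message, so each fragment's full JSON stays under the limit, and the loop condition is equivalent to producing ceil(contentLength / fragmentLength) fragments. Each queued fragment is only posted after the host acknowledges the previous one with a nextMessage action carrying the multipartid. A worked example of the budget, with made-up sizes:

    // Worked example of the fragment budget from the loop above; all sizes invented.
    const LIMIT = 61440;                                        // ceiling used in app/main.js
    const messageLength = 150000;                               // JSON.stringify(m).length
    const contentJSONLength = 148000;                           // JSON.stringify(m.hugocontent).length
    const envelopeOverhead = messageLength - contentJSONLength; // front matter, paths, flags: 2000 bytes
    const fragmentLength = LIMIT - envelopeOverhead;            // 59440 content characters per fragment
    const contentLength = 147000;                               // m.hugocontent.length
    const fragments = Math.ceil(contentLength / fragmentLength);// 3 fragments queued under one multipartid
    console.log({ envelopeOverhead, fragmentLength, fragments });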
@@ -35,10 +35,13 @@ var (
var nativeEndian binary.ByteOrder

// bufferSize used to set size of IO buffer - adjust to accommodate message payloads
var bufferSize = 8192 * 32
var bufferSize = 8192 * 8

// IncomingMessage represents a message sent to the native host.
type IncomingMessage struct {
Multipart bool `json:"multipart"`
MultipartID string `json:"multipartid"`
MultipartNext bool `json:"multipartnext"`
Hugocontent string `json:"hugocontent"`
Hugofrontmatter string `json:"hugofrontmatter"`
Filepath string `json:"filepath"`

@@ -51,8 +54,8 @@ type IncomingMessage struct {

// OutgoingMessage respresents a response to an incoming message query.
type OutgoingMessage struct {
Query string `json:"query"`
Response string `json:"response"`
Action string `json:"action"`
ActionID string `json:"actionid"`
}

// Init initializes logger and determines native byte order.

@@ -111,11 +114,11 @@ func read() {

// read the content of the message from buffer
content := make([]byte, lengthNum)
_, err := s.Read(content)
n, err := s.Read(content)
if err != nil && err != io.EOF {
Error.Fatal(err)
}

Trace.Printf("Read %d bytes out of announced %d.", n, lengthNum)
// message has been read, now parse and process
parseMessage(content)
}

@@ -160,7 +163,10 @@ func commitChangeToGit(iMsg IncomingMessage) {
// err = ioutil.WriteFile(iMsg.Filepath, []byte(iMsg.Hugopage), 0644)
// check(err)
var frontmatter map[string]interface{}
json.Unmarshal([]byte(iMsg.Hugofrontmatter), &frontmatter)
err = json.Unmarshal([]byte(iMsg.Hugofrontmatter), &frontmatter)
if err != nil {
Error.Printf("ERROR: Unmarshaling frontmatter JSON failed: %s", err)
}
Trace.Printf("Frontmatter JSON: %s", &frontmatter)
tml, err := toml.TreeFromMap(frontmatter)
if err != nil {

@@ -210,35 +216,58 @@ func triggerGitHook(gitpath string, publish bool, offline bool, protocol string)
Trace.Printf("Git Hook Combined Output: \n%s", out)
}

func commitAndRender(iMsg IncomingMessage) {
commitChangeToGit(iMsg)
triggerGitHook(iMsg.Gitpath, iMsg.Publish, iMsg.Offline, iMsg.Protocol)
if iMsg.Protocol == "file" {
send(OutgoingMessage{Action: "redirect", ActionID: iMsg.Publishpath})
}
}

// parseMessage parses incoming message
func parseMessage(msg []byte) {
iMsg := decodeMessage(msg)
Trace.Printf("Message received: %s", msg)

commitChangeToGit(iMsg)
triggerGitHook(iMsg.Gitpath, iMsg.Publish, iMsg.Offline, iMsg.Protocol)
// start building outgoing json message
oMsg := OutgoingMessage{
Query: iMsg.Protocol,
if iMsg.Multipart {
tmpFile := filepath.Join(os.TempDir(), fmt.Sprintf("sandpoints_hugocontent_%s", iMsg.MultipartID))
if iMsg.MultipartNext {
file, err := os.OpenFile(tmpFile, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
if err != nil {
Error.Printf("Error: %v", err)
} else {
defer file.Close()
_, err := file.WriteString(iMsg.Hugocontent)
if err != nil {
Error.Printf("file.WriteString err: %s", err)
}
Trace.Printf("Wrote multipart to: %s\n%s\n", iMsg.MultipartID, iMsg.Hugocontent[:300])
send(OutgoingMessage{Action: "nextMessage", ActionID: iMsg.MultipartID})
}
} else {
hugoContent, err := os.ReadFile(tmpFile)
if err != nil {
Error.Printf("Error reading file: %v", err)
}
iMsg.Hugocontent = string(hugoContent) + iMsg.Hugocontent
err = os.RemoveAll(tmpFile)
if err != nil {
Error.Printf("Error: %v", err)
}
commitAndRender(iMsg)
}
} else {
commitAndRender(iMsg)
}

switch iMsg.Protocol {
case "file":
oMsg.Response = iMsg.Publishpath
default:
oMsg.Response = "false"
}

send(oMsg)
}

// decodeMessage unmarshals incoming json request and returns query value.
func decodeMessage(msg []byte) IncomingMessage {
var iMsg IncomingMessage
// msg = bytes.TrimRight(msg, "\x00")
err := json.Unmarshal(msg, &iMsg)
if err != nil {
Error.Printf("Unable to unmarshal json to struct: %v", err)
}
Trace.Printf("iMsg.Multipart: %t, iMsg.MultipartID: %s, iMsg.MultipartNext: %t", iMsg.Multipart, iMsg.MultipartID, iMsg.MultipartNext)
return iMsg
}
Binary file not shown.