Browse Source

Add: Basic upload functionality is working, but still buggy and needs to be tested

Sebastian Kreisel 1 year ago
parent
commit
fc5014c1d3

+ 3 - 0
elfcom-backend/config/deploy.conf

@@ -16,5 +16,8 @@ deploy
 [Log]
 /var/elfcom/private/log
 
+[Upload]
+/var/elfcom/private/upload
+
 [Version]
 /var/elfcom/private/elfcom_version.txt

+ 4 - 1
elfcom-backend/config/dev.conf

@@ -15,7 +15,10 @@ development
 ../db/files
 
 [Log]
-./log
+../log
+
+[Upload]
+../upload
 
 [Version]
 ./elfcom_version.txt

+ 3 - 1
elfcom-backend/src/Config.hs

@@ -27,6 +27,7 @@ data SiteConfig = SiteConfig { develop :: Bool
                              , database :: T.Text
                              , filesDir :: T.Text
                              , logDir :: T.Text
+                             , uploadDir :: T.Text
                              } deriving Show
 
 data AppState = AppState { elfeckPerm :: [T.Text]
@@ -51,6 +52,7 @@ parseConfig configPath = do
     (f "[Database]" bs)
     (f "[Files]" bs)
     (f "[Log]" bs)
+    (f "[Upload]" bs)
   where blocks tx = map T.lines $ T.splitOn "\n\n" tx
         filterComments bs = map (filter (not . T.isPrefixOf "#")) bs
         f s bs = foldl T.append "" $ map (parseSection s) bs
@@ -63,7 +65,7 @@ parseConfig configPath = do
 
 spockConfig :: SiteConfig -> PoolOrConn conn -> T.Text ->
                IO (SpockCfg conn SessionVal AppState)
-spockConfig (SiteConfig dev sessDurs _ _ _ _) conn v = do
+spockConfig (SiteConfig dev sessDurs _ _ _ _ _) conn v = do
   let maxSessDur = maximum (map snd (sessDurs))
   sessConfig <- sessionConfig maxSessDur
   let perms = genElfeckPerm

+ 78 - 2
elfcom-backend/src/Endpoint/Files.hs

@@ -9,10 +9,16 @@ module Endpoint.Files where
 
 import Control.Monad.Trans
 import Data.Aeson hiding (json)
+import Data.Aeson.Text
+import qualified Data.ByteString as BS
 import Data.HVect hiding (head, tail, (!!), length)
 import Data.Maybe
 import Data.Monoid ((<>))
+import Data.List ((\\))
 import qualified Data.Text as T
+import qualified Data.Text.IO as TIO
+import Text.Read (readMaybe)
+import qualified Data.Text.Lazy.IO as TLIO
 import Web.Spock hiding (head, SessionId)
 import GHC.Generics
 
@@ -20,11 +26,13 @@ import Common
 import Model.Files
 
 
-handleFilesEndpoints :: ListContains n IsAdmin xs => T.Text -> App (HVect xs)
-handleFilesEndpoints filesD = do
+handleFilesEndpoints :: ListContains n IsAdmin xs => T.Text -> T.Text ->
+                        App (HVect xs)
+handleFilesEndpoints filesD uplDir = do
   post (baseRoute <//> "fetch") $ connectH (filesFetchHandler filesD)
   post (baseRoute <//> "load") $ connectH (filesLoadHandler filesD)
   post (baseRoute <//> "submit") $ connectH (filesSubmitHandler filesD)
+  post (baseRoute <//> "upload") $ connectH (filesUploadHandler filesD uplDir)
   where baseRoute = "api" <//> "admin" <//> "files"
 
 
@@ -114,6 +122,60 @@ filesSubmitHandler baseP (FilesDeleteReq dPath) = do
 
 -- ---------------------------
 
+-- | Handle one step of a chunked upload.
+--
+-- Header phase: validate the target location, write the upload header
+-- (<uid>.json) into the upload dir unless it already exists (so an
+-- interrupted upload can resume), and answer with the chunk numbers
+-- still missing.
+--
+-- Chunk phase: store the chunk as <uid>_<n>.chunk and answer with the
+-- remaining missing chunks, or 'FilesUploadCompleted' once all arrived.
+--
+-- NOTE(review): dir/nam come from the client and are spliced into a
+-- filesystem path; confirm they are sanitized against ".." traversal.
+filesUploadHandler :: T.Text -> T.Text -> FilesUploadReq ->
+                      Action ctx FilesUploadRsp
+filesUploadHandler baseP uplDir (FilesUploadHeaderReq dir nam uid num) = do
+  targetDirExists <- liftIO $ checkDir $ T.unpack (baseP <> "/" <> dir)
+  targetFileExists <- liftIO $ checkPath $ T.unpack (baseP <> "/" <> dir <>
+                                                     "/" <> nam)
+  case (targetDirExists, targetFileExists) of
+    (False, _) -> return $ FilesUploadError "Target dir invalid"
+    (_, True) -> return $ FilesUploadError "Target filename invalid"
+    _ -> do
+      let headerPath = uplDir <> "/" <> uid <> ".json"
+      headerExists <- liftIO $ checkFile $ T.unpack headerPath
+      -- Write the header only once; resending it must not clobber state.
+      if headerExists
+        then return ()
+        else liftIO (TLIO.writeFile (T.unpack headerPath)
+                     (encodeToLazyText (FilesUploadHeaderReq dir nam uid num)))
+      exChunks <- liftIO $ getExistingChunks uplDir uid
+      return $ FilesUploadOkay ([0..num-1] \\ exChunks)
+filesUploadHandler _ uplDir (FilesUploadChunkReq uid num cont) = do
+  let pp = uplDir <> "/" <> uid
+  headerExists <- liftIO $ checkFile $ T.unpack (pp <> ".json")
+  if not headerExists
+    then return $ FilesUploadError "Found no header for chunk"
+    else do
+      headerBS <- liftIO $ BS.readFile (T.unpack (pp <> ".json"))
+      case decodeStrict' headerBS of
+        -- Pattern-match the constructor instead of using the partial
+        -- selector 'uhrNumChunks', which would crash at runtime if the
+        -- stored header decoded to a chunk request.
+        Just (FilesUploadHeaderReq _ _ _ totalNum) -> do
+          liftIO (TIO.writeFile (T.unpack pp ++ "_" ++ show num ++ ".chunk")
+                  cont)
+          exChunks <- liftIO $ getExistingChunks uplDir uid
+          case [0..totalNum-1] \\ exChunks of
+            [] -> return FilesUploadCompleted
+            missing -> return $ FilesUploadOkay missing
+        _ -> return $ FilesUploadError "Header for chunk is corrupt"
+
+-- -------------------
+
+-- | List the chunk numbers already present in the upload directory for
+-- the given upload id.  Chunk files are named "<uid>_<n>.chunk"; a
+-- missing directory or an unparsable file name contributes nothing.
+getExistingChunks :: T.Text -> T.Text -> IO [Int]
+getExistingChunks uplDir uplName = do
+  mConts <- getDirContents (DirContentsQuery (T.unpack uplDir) ""
+                            (Just uplName) (Just ".chunk") Nothing)
+  case mConts of
+    Nothing -> return []
+    Just entries -> return (mapMaybe chunkNum entries)
+  where chunkNum (nm, isFile)
+          | not isFile = Nothing
+          | otherwise = readMaybe . T.unpack =<<
+                        T.stripSuffix ".chunk" =<<
+                        T.stripPrefix (uplName <> "_") (T.pack nm)
+
 getParentPath :: T.Text -> T.Text
 getParentPath p = T.intercalate "/" (init $ T.splitOn "/" p)
 
@@ -143,3 +205,17 @@ data FilesSubmitReq = FilesDeleteReq { delPath :: !T.Text }
 data FilesSubmitRsp = FilesSubmitOkay { sbmAction :: !T.Text }
                     | FilesSubmitFail { sbmError :: !T.Text }
                     deriving (Show, Eq, Generic, ToJSON, FromJSON)
+
+-- | Requests understood by the upload endpoint: a header request that
+-- announces an upload (target dir/name, uid, number of chunks) and a
+-- chunk request carrying one chunk's content.
+data FilesUploadReq = FilesUploadHeaderReq { uhrDir :: !T.Text
+                                           , uhrName :: !T.Text
+                                           , uhrUid :: !T.Text
+                                           , uhrNumChunks :: !Int
+                                           }
+                    | FilesUploadChunkReq { ucrUid :: !T.Text
+                                          , ucrChunkNum :: !Int
+                                          , ucrContent :: !T.Text }
+                    deriving (Show, Eq, Generic, ToJSON, FromJSON)
+
+-- | Responses: the chunk numbers still missing, completion, or an error.
+data FilesUploadRsp = FilesUploadOkay { uplMissingChunks :: ![Int] }
+                    | FilesUploadCompleted
+                    -- Strictness added for consistency with every other
+                    -- record field in this module.
+                    | FilesUploadError { uplErrorMsg :: !T.Text }
+                    deriving (Show, Eq, Generic, ToJSON, FromJSON)

+ 2 - 2
elfcom-backend/src/Main.hs

@@ -69,7 +69,7 @@ main = do
   runSpock 2000 (spock spockCfg (app config))
 
 app :: SiteConfig -> App ()
-app (SiteConfig isDev sessDur _ _ filesD logD) = do
+app (SiteConfig isDev sessDur _ _ filesD logD uplDir) = do
   st <- getState
   let v = dockerVersion st
 
@@ -130,7 +130,7 @@ app (SiteConfig isDev sessDur _ _ filesD logD) = do
       handleLogoutEndpoint
     prehook (authHook isDev) $ prehook (adminHook isDev) $ do
       handleEditEndpoints
-      handleFilesEndpoints filesD
+      handleFilesEndpoints filesD uplDir
       handleAccessLogEndpoints (accessLogLock st) logD
 
     -- API NOT FOUND

+ 3 - 2
elfcom-backend/src/Model/Files.hs

@@ -23,7 +23,8 @@ data DirContentsQuery = DirContentsQuery { dcqDBPath :: FilePath
 -- returns ( qpth / content, isFile ) without dbpath prefix
 getDirContents :: DirContentsQuery -> IO (Maybe [(FilePath, Bool)])
 getDirContents (DirContentsQuery dbpth qpth mpref msuff mcont) = do
-  let pth = dbpth ++ "/" ++ qpth
+  let sl = if qpth == "" then "" else "/"
+  let pth = dbpth ++ sl ++ qpth
   chk <- checkDir pth
   case chk of
     False -> return Nothing
@@ -34,7 +35,7 @@ getDirContents (DirContentsQuery dbpth qpth mpref msuff mcont) = do
                                         filCont mcont x)
                           fs)
       mapM (\f -> (doesFileExist (pth ++ "/" ++ f) >>=
-                   (\b -> return (qpth ++ "/" ++ f, b))))
+                   (\b -> return (qpth ++ sl ++ f, b))))
         filFs >>= return . Just
   where filPref Nothing _ = True
         filPref (Just pref) x = isPrefixOf (T.unpack pref) x

+ 1 - 2
elfcom-backend/src/Worker.hs

@@ -9,7 +9,6 @@ import qualified Data.Text as T
 
 import Config
 import Model.AccessLog
-import Model.InternalLog
 
 
 workerErrorHandler = ErrorHandlerIO foo
@@ -60,7 +59,7 @@ accessLogWriteDef = (WorkerDef accessLogWriteConfig doAccessLogWrite
 
 doInternalLogEnqueue :: (SpockState m ~ AppState, HasSpock m, MonadIO m) =>
                         T.Text -> m WorkResult
-doInternalLogEnqueue message = do
+doInternalLogEnqueue _ = do
   --appState <- getState
   --liftIO $ queueInternalLogEntry (internalLogChan appState) message
   return WorkComplete

+ 4 - 4
elfcom-backend/static/css/site_files.css

@@ -163,7 +163,7 @@ div.fup-inactive {
 
 div.fup-name {
     display: inline-block;
-    width: 180px;
+    width: 201px;
     vertical-align: bottom;
     margin-right: 5px;
     overflow: hidden;
@@ -173,16 +173,16 @@ div.fup-name {
 
 div.fup-size {
     display: inline-block;
-    width: 55px;
+    width: 47px;
     vertical-align: bottom;
     margin-right: 5px;
 }
 
 div.fup-prog {
     display: inline-block;
-    width: 55px;
+    width: 47px;
     vertical-align: bottom;
-    margin-right: 10px;
+    margin-right: 5px;
 }
 div.fup-prog-done {
     color: green;

+ 5 - 3
elfcom-frontend/files/fetch.js

@@ -2,6 +2,8 @@
 
 import Common from "../common.js";
 import Submit from "./submit.js"; // only for setSubmitAction
+import Upload from "./upload.js"; // only for setTargetDir
+
 import SelTree from "../../libs/selECKpanel/src/seleck.js";
 
 var Fetch = {
@@ -155,9 +157,9 @@ function setSelect(sobjs, selNode, accDiv) {
     sobjs.namEl.value = accDiv.innerHTML + "/" + selNode.content;
     sobjs.renEl.value = accDiv.innerHTML + "/" + selNode.content;
     if(!selNode.isFile) {
-      sobjs.unmEl.value = accDiv.innerHTML + "/" + selNode.content;
+      Upload.setTargetDir(sobjs, accDiv.innerHTML + "/" + selNode.content);
     } else {
-      sobjs.unmEl.value = "";
+      Upload.setTargetDir(sobjs, "");
     }
     selectedNode = selNode;
     selectedDiv = accDiv;
@@ -172,7 +174,7 @@ function setSelect(sobjs, selNode, accDiv) {
     selectedDiv = null;
     sobjs.namEl.value = "";
     sobjs.renEl.value = "";
-    sobjs.unmEl.value = "";
+    Upload.setTargetDir(sobjs, "");
   }
   // Since sobjs.namEl and sobjs.renEl change here, we need to update submit
   // action as well

+ 122 - 1
elfcom-frontend/files/upload.js

@@ -1,15 +1,20 @@
 "use strict";
 
 import Common from "../common.js";
-import SelTree from "../../libs/selECKpanel/src/seleck.js";
 import Fetch from "./fetch.js";
+import SelTree from "../../libs/selECKpanel/src/seleck.js";
+import SHA1 from "../sha1.js";
 
 var Upload = {
   setPending: setPending,
   commitUpload: commitUpload,
+  setTargetDir: setTargetDir,
 };
 export default Upload;
 
+// True while an upload batch is running; guards target-dir updates
+// made by setTargetDir.
+var uploadLocked = false;
+var chunkSize = 1024 * 512; // 512 KiB per chunk
+
 
 function setPending(sobjs) {
   var files = sobjs.uplEl.files;
@@ -30,9 +35,115 @@ function commitUpload(sobjs) {
     return;
   } else {
     setUploadLock(sobjs, true);
+    uploadNext(sobjs);
   }
 }
 
+function uploadNext(sobjs) {
+  for(var c of sobjs.uplTree.children) {
+    if(!(c.uploadComplete || c.hasError)) {
+      initiateFileUpload(sobjs, sobjs.uplTree.children[0])
+        .then(function() {
+          c.uploadComplete = true;
+          c.progDiv.innerHTML = "done";
+          c.domContentContainer.className = "fup-top fup-inactive";
+          uploadNext(sobjs);
+          return;
+        })
+        .catch(function() {
+          c.hasError = true;
+          c.progDiv.innerHTML = "error";
+          c.domContentContainer.className = "fup-top fup-inactive";
+          uploadNext(sobjs);
+          return;
+        });
+    }
+  }
+  console.log("all done!");
+}
+
+/*
+  1. Create a json summary containing
+    > Target Directory
+    > File name
+    > Checksum of file
+    > Number of chunks
+  2. Server returns (success / failure) and a list of missing chunks
+  3. Send first missing chunk
+    > Chunk Number
+    > File Content
+  4. Server returns (complete / success / failure) + a list of missing chunks
+  5. Once completed is returned we are done
+*/
+
+// Start the upload of one file: read its first chunk, derive the upload
+// uid, send the header request, and hand off to uploadNextChunk.  The
+// returned Promise resolves when the server reports completion and
+// rejects on any transport/JSON/server error.
+function initiateFileUpload(sobjs, selNode) {
+  var file = selNode.file;
+  var numChunks = Math.ceil(1.0 * file.size / chunkSize);
+  selNode.domContentContainer.className = "fup-top";
+  return new Promise(function(resolve, reject) {
+    var reader = new FileReader();
+    reader.onload = function() {
+      // readAsDataURL yields "data:<mime>;base64,<payload>"; keep payload.
+      var content = reader.result.split(",")[1];
+      // NOTE(review): uid is the SHA1 of the FIRST chunk's base64 payload
+      // only, not a checksum of the whole file as the protocol summary
+      // above suggests -- confirm this is intended.
+      var uid = SHA1.hash(content);
+      console.log("Sending header req with uid: " + uid);
+      var fileDes = {
+        tag: "FilesUploadHeaderReq",
+        uhrDir: sobjs.unmEl.value,
+        uhrName: file.name,
+        uhrUid: uid,
+        uhrNumChunks: numChunks,
+      };
+      Common.sendJson("/api/admin/files/upload", fileDes)
+        .then(res => res.json(),
+              () => { Common.fetchError("Upload D"); reject("ajax failed"); })
+        .then(function(json) {
+          if(json.tag === "FilesUploadCompleted") {
+            resolve();
+          } else if(json.tag === "FilesUploadOkay") {
+            // Server replies with the chunks it is still missing.
+            uploadNextChunk(json.uplMissingChunks, uid, selNode,
+                            resolve, reject);
+          } else {
+            Common.serverError("Upload D"); reject("server failed");
+          }
+        }, () => { Common.jsonError("Upload D"); reject("json failed");});
+    };
+    reader.readAsDataURL(file.slice(0, chunkSize));
+  });
+}
+
+// Upload the first chunk the server reported missing, then recurse with
+// the server's updated missing list until it answers Completed.  resolve
+// and reject belong to the Promise created in initiateFileUpload.
+function uploadNextChunk(missingChunks, uid, selNode, resolve, reject) {
+  var file = selNode.file;
+  var numChunks = Math.ceil(1.0 * file.size / chunkSize);
+  var chunkNum = missingChunks[0];
+  // Approximate progress: assumes every stored chunk is full-sized (the
+  // final chunk may be smaller).
+  var alreadyUploaded = (numChunks - missingChunks.length) * chunkSize;
+  selNode.progDiv.innerHTML = formatBytes(alreadyUploaded);
+  var start = chunkNum * chunkSize;
+  var reader = new FileReader();
+  reader.onload = function() {
+    // Strip the data-URL prefix; only the base64 payload is sent.
+    var content = reader.result.split(",")[1];
+    var chunk = {
+      tag: "FilesUploadChunkReq",
+      ucrUid: uid,
+      ucrChunkNum: chunkNum,
+      ucrContent: content,
+    };
+    Common.sendJson("/api/admin/files/upload", chunk)
+      .then(res => res.json(),
+            () => { Common.fetchError("Upload D"); reject("ajax failed"); })
+      .then(function(json) {
+        if(json.tag === "FilesUploadCompleted") {
+          resolve();
+        } else if(json.tag === "FilesUploadOkay") {
+          uploadNextChunk(json.uplMissingChunks, uid, selNode,
+                          resolve, reject);
+        } else {
+          Common.serverError("Upload D"); reject("server failed");
+        }
+      }, () => { Common.jsonError("Upload D"); reject("json failed");});
+  };
+  reader.readAsDataURL(file.slice(start, start + chunkSize));
+}
+
 function resetUpload(sobjs) {
   setUploadLock(sobjs, false);
 }
@@ -52,8 +163,10 @@ function createNode(sobjs, file) {
   topDiv.appendChild(progDiv);
   topDiv.appendChild(cancelDiv);
   var selNode = new SelTree(topDiv, null, null, file.name);
+  selNode.progDiv = progDiv;
   selNode.uploadCompleted = false;
   selNode.hasError = false;
+  selNode.file = file;
   cancelDiv.addEventListener("click", function() {
     sobjs.uplTree.removeChildrenByContent(file.name);
     if(sobjs.uplTree.children.length === 0) {
@@ -64,6 +177,7 @@ function createNode(sobjs, file) {
 }
 
 function setUploadLock(sobjs, lock) {
+  uploadLocked = true;
   sobjs.unmEl.disabled = lock;
   sobjs.usbEl.disabled = lock;
   sobjs.uplEl.disabled = lock;
@@ -72,6 +186,7 @@ function setUploadLock(sobjs, lock) {
   } else {
     sobjs.ulbEl.className = "files-upload-label-enabled";
     sobjs.uplTree.domContentContainer.className = "";
+    sobjs.unmEl.value = sobjs.namEl.value; // TODO: hacky way to update unmEl
   }
 }
 
@@ -84,3 +199,9 @@ function formatBytes(bytes) {
   var i = Math.floor(Math.log(bytes) / Math.log(k));
   return parseFloat((bytes / Math.pow(k, i)).toFixed(dm)) + " " + sizes[i];
 }
+
+function setTargetDir(sobjs, s) {
+  if(!uploadLocked) {
+    sobjs.unmEl.value = s;
+  }
+}

+ 131 - 0
elfcom-frontend/sha1.js

@@ -0,0 +1,131 @@
+/*!
+ * sha1-es
+ * https://github.com/logotype/es-crypto.git
+ *
+ * Copyright 2017 Victor Norgren
+ * Released under the MIT license
+ */
+export default class SHA1 {
+  // NOTE(review): vendored third-party library (see license header above);
+  // keep modifications here to a minimum and upstream real fixes.
+
+  static hash(string) {
+    return SHA1.stringToHex(SHA1.arrayToString(SHA1.run(SHA1.stringToArray(string), string.length * 8)));
+  }
+
+  // Core SHA-1 compression loop over the padded message words.
+  static run(input, len) {
+    const l = (len + 64 >> 9 << 4) + 15;
+    const W = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
+    let i = 0,
+        H0 = 1732584193,
+        H1 = -271733879,
+        H2 = -1732584194,
+        H3 = 271733878,
+        H4 = -1009589776,
+        a = H0,
+        b = H1,
+        c = H2,
+        d = H3,
+        e = H4;
+
+    input[len >> 5] |= 0x80 << 24 - len % 32;
+    input[l] = len;
+
+    for (; i < l; i += 16) {
+      H0 = a;
+      H1 = b;
+      H2 = c;
+      H3 = d;
+      H4 = e;
+
+      let j = 0,
+          t = null;
+
+      for (; j < 80; j += 1) {
+        if (j < 16) {
+          W[j] = input[i + j];
+        } else {
+          W[j] = SHA1.rotl(W[j - 3] ^ W[j - 8] ^ W[j - 14] ^ W[j - 16], 1);
+        }
+        t = SHA1.add(SHA1.add(SHA1.rotl(a, 5), SHA1.chMajPty(j, b, c, d)), SHA1.add(SHA1.add(e, W[j]), SHA1.cnst(j)));
+        e = d;
+        d = c;
+        c = SHA1.rotl(b, 30);
+        b = a;
+        a = t;
+      }
+
+      a = SHA1.add(a, H0);
+      b = SHA1.add(b, H1);
+      c = SHA1.add(c, H2);
+      d = SHA1.add(d, H3);
+      e = SHA1.add(e, H4);
+    }
+
+    return [a, b, c, d, e];
+  }
+
+  // Unpack an array of 32-bit big-endian words into a byte-per-char string.
+  static arrayToString(input) {
+    const l = input.length * 32;
+    let i = 0,
+        output = '';
+
+    for (; i < l; i += 8) {
+      output += String.fromCharCode(input[i >> 5] >>> 24 - i % 32 & 0xFF);
+    }
+    return output;
+  }
+
+  // Pack a string into 32-bit big-endian words.  Only the low byte of
+  // each char code is kept (& 0xFF), so non-ASCII characters are
+  // truncated -- inputs here are base64 payloads, which are ASCII.
+  static stringToArray(input) {
+    const l = input.length * 8;
+    const output = Array(input.length >> 2);
+    const lo = output.length;
+    let i = 0;
+
+    for (i = 0; i < lo; i += 1) {
+      output[i] = 0;
+    }
+    for (i = 0; i < l; i += 8) {
+      output[i >> 5] |= (input.charCodeAt(i / 8) & 0xFF) << 24 - i % 32;
+    }
+    return output;
+  }
+
+  static stringToHex(input) {
+    const hex = '0123456789abcdef';
+    const l = input.length;
+    let output = '',
+        x = 0, i = 0;
+
+    for (; i < l; i += 1) {
+      x = input.charCodeAt(i);
+      output += hex.charAt(x >>> 4 & 0x0F) + hex.charAt(x & 0x0F);
+    }
+    return output;
+  }
+
+  // Round function: Ch for t<20, Parity for 20..39 and 60..79, Maj for 40..59.
+  static chMajPty(t, b, c, d) {
+    if (t < 20) {
+      return b & c | ~b & d;
+    }
+    if (t < 40) {
+      return b ^ c ^ d;
+    }
+    if (t < 60) {
+      return b & c | b & d | c & d;
+    }
+    return b ^ c ^ d;
+  }
+
+  // Per-round additive constants.
+  static cnst(t) {
+    return t < 20 ? 1518500249 : t < 40 ? 1859775393 : t < 60 ? -1894007588 : -899497514;
+  }
+
+  static rotl(x, n) {
+    return x << n | x >>> 32 - n;
+  }
+
+  // 32-bit addition split into 16-bit halves to avoid JS precision issues.
+  static add(x, y) {
+    const lsw = (x & 0xFFFF) + (y & 0xFFFF),
+          msw = (x >> 16) + (y >> 16) + (lsw >> 16);
+    return msw << 16 | lsw & 0xFFFF;
+  }
+}