added endpoint to receive encrypted file blocks

Roland Osborne 2023-04-28 14:07:01 -07:00
parent c342ed73b4
commit 452679324a
8 changed files with 195 additions and 40 deletions

View File

@@ -4,7 +4,7 @@ import { createThumbnail } from "react-native-create-thumbnail";
import ImageResizer from '@bam.tech/react-native-image-resizer';
import RNFS from 'react-native-fs';
const ENCRYPTED_BLOCK_SIZE = (128 * 1024); //100k
const ENCRYPTED_BLOCK_SIZE = (1024 * 1024);
export function useUploadContext() {
@@ -160,17 +160,19 @@ async function upload(entry, update, complete) {
const thumb = await getThumb(data, type, position);
const parts = [];
for (let pos = 0; pos < size; pos += ENCRYPTED_BLOCK_SIZE) {
const { blockEncrypted, blockIv } = await getEncryptedBlock(pos, ENCRYPTED_BLOCK_SIZE);
const partId = await axios.post(`${entry.baseUrl}block${entry.urlParams}`, blockEncrypted, {
const len = pos + ENCRYPTED_BLOCK_SIZE > size ? size - pos : ENCRYPTED_BLOCK_SIZE;
const { blockEncrypted, blockIv } = await getEncryptedBlock(pos, len);
const part = await axios.post(`${entry.baseUrl}blocks${entry.urlParams}`, blockEncrypted, {
headers: {'Content-Type': 'text/plain'},
signal: entry.cancel.signal,
onUploadProgress: (ev) => {
const { loaded, total } = ev;
const partLoaded = pos + Math.floor(blockEncrypted.length * loaded / total);
entry.active = { partLoaded, size }
const partLoaded = pos + Math.floor(len * loaded / total);
entry.active = { loaded: partLoaded, total: size }
update();
}
});
parts.push({ blockIv, partId });
parts.push({ blockIv, partId: part.data.assetId });
}
entry.assets.push({
encrypted: { type, thumb, parts }

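The loop above uploads the file in fixed-size encrypted chunks, clamping the final chunk to the bytes that remain so the request body never runs past the end of the file. A minimal sketch of the boundary arithmetic, assuming the 1 MiB block size introduced here (blockRanges is an illustrative helper, not part of the code):

const ENCRYPTED_BLOCK_SIZE = (1024 * 1024);

// illustrative helper: enumerate the (pos, len) pairs the upload loop walks through
function blockRanges(size) {
  const ranges = [];
  for (let pos = 0; pos < size; pos += ENCRYPTED_BLOCK_SIZE) {
    // the final block is shorter whenever the file size is not a multiple of the block size
    const len = pos + ENCRYPTED_BLOCK_SIZE > size ? size - pos : ENCRYPTED_BLOCK_SIZE;
    ranges.push({ pos, len });
  }
  return ranges;
}

// a 2.5 MiB file (2621440 bytes) yields three blocks: 1 MiB, 1 MiB, 0.5 MiB
// blockRanges(2621440) => [ { pos: 0, len: 1048576 }, { pos: 1048576, len: 1048576 }, { pos: 2097152, len: 524288 } ]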
View File

@@ -3248,6 +3248,52 @@ paths:
type: string
format: binary
/content/channels/{channelId}/topics/{topicId}/blocks:
post:
tags:
- content
description: Add an asset to the channel. The payload is a file block encoded as a base64 string. This supports e2e encryption, as the client encrypts the file block before applying the base64 encoding.
operationId: add-channel-topic-block
security:
- bearerAuth: []
parameters:
- name: channelId
in: path
description: specified channel id
required: true
schema:
type: string
- name: topicId
in: path
description: specified topic id
required: true
schema:
type: string
responses:
'201':
description: success
content:
application/json:
schema:
type: array
items:
$ref: '#/components/schemas/Asset'
'401':
description: permission denied
'404':
description: channel not found
'406':
description: storage limit reached
'410':
description: account disabled
'500':
description: internal server error
requestBody:
content:
application/json:
schema:
type: string
/content/channels/{channelId}/topics/{topicId}/assets/{assetId}:
get:
tags:

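For illustration, a hedged sketch of how a client might call the new blocks endpoint with axios, mirroring the upload contexts in this commit; uploadBlock and the token-bearing urlParams are assumptions made for the example, not names from the API:

import axios from 'axios';

// illustrative only: post one base64-encoded, client-encrypted block and keep the returned asset id
async function uploadBlock(baseUrl, urlParams, blockEncrypted) {
  const response = await axios.post(`${baseUrl}blocks${urlParams}`, blockEncrypted, {
    headers: { 'Content-Type': 'text/plain' },
  });
  // the upload contexts read the stored block's id from the response and keep it next to the block IV
  return response.data.assetId;
}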
View File

@@ -151,23 +151,6 @@ func AddChannelTopicAsset(w http.ResponseWriter, r *http.Request) {
// invoke transcoder
transcode()
// determine affected contact list
cards := make(map[string]store.Card)
for _, member := range channelSlot.Channel.Members {
cards[member.Card.GUID] = member.Card
}
for _, group := range channelSlot.Channel.Groups {
for _, card := range group.Cards {
cards[card.GUID] = card
}
}
// notify
SetStatus(act)
for _, card := range cards {
SetContactChannelNotification(act, &card)
}
WriteResponse(w, &assets)
}
@@ -210,6 +193,10 @@ func saveAsset(src io.Reader, path string) (crc uint32, size int64, err error) {
data := make([]byte, 4096)
for {
n, res := src.Read(data)
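// a Reader may return n > 0 together with io.EOF on the final chunk, so hash and write the data before inspecting res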
if n > 0 {
crc = crc32.Update(crc, table, data[:n])
output.Write(data[:n])
}
if res != nil {
if res == io.EOF {
break
@@ -217,9 +204,6 @@ func saveAsset(src io.Reader, path string) (crc uint32, size int64, err error) {
err = res
return
}
crc = crc32.Update(crc, table, data[:n])
output.Write(data[:n])
}
// read size

View File

@@ -0,0 +1,107 @@
package databag
import (
"databag/internal/store"
"errors"
"github.com/google/uuid"
"github.com/gorilla/mux"
"gorm.io/gorm"
"net/http"
)
//AddChannelTopicBlock adds a file block asset to a topic
func AddChannelTopicBlock(w http.ResponseWriter, r *http.Request) {
// scan parameters
params := mux.Vars(r)
topicID := params["topicID"]
channelSlot, guid, code, err := getChannelSlot(r, true)
if err != nil {
ErrResponse(w, code, err)
return
}
act := &channelSlot.Account
// check storage
if full, err := isStorageFull(act); err != nil {
ErrResponse(w, http.StatusInternalServerError, err)
return
} else if full {
ErrResponse(w, http.StatusNotAcceptable, errors.New("storage limit reached"))
return
}
// load topic
var topicSlot store.TopicSlot
if err = store.DB.Preload("Topic").Where("channel_id = ? AND topic_slot_id = ?", channelSlot.Channel.ID, topicID).First(&topicSlot).Error; err != nil {
if errors.Is(err, gorm.ErrRecordNotFound) {
ErrResponse(w, http.StatusNotFound, err)
} else {
ErrResponse(w, http.StatusInternalServerError, err)
}
return
}
if topicSlot.Topic == nil {
ErrResponse(w, http.StatusNotFound, errors.New("referenced empty topic"))
return
}
// can only update topic if creator
if topicSlot.Topic.GUID != guid {
ErrResponse(w, http.StatusUnauthorized, errors.New("topic not created by you"))
return
}
// avoid async cleanup of file before record is created
garbageSync.Lock()
defer garbageSync.Unlock()
// save new file
id := uuid.New().String()
path := getStrConfigValue(CNFAssetPath, APPDefaultPath) + "/" + channelSlot.Account.GUID + "/" + id
crc, size, err := saveAsset(r.Body, path)
if err != nil {
ErrResponse(w, http.StatusInternalServerError, err)
return
}
asset := &store.Asset{}
asset.AssetID = id
asset.AccountID = channelSlot.Account.ID
asset.ChannelID = channelSlot.Channel.ID
asset.TopicID = topicSlot.Topic.ID
asset.Status = APPAssetReady
asset.Transform = APPTransformCopy
asset.TransformID = id
asset.Size = size
asset.Crc = crc
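// bump the topic, channel, and account revisions in one transaction so connected clients detect the new block on their next sync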
err = store.DB.Transaction(func(tx *gorm.DB) error {
if res := tx.Save(asset).Error; res != nil {
return res
}
if res := tx.Model(&topicSlot.Topic).Update("detail_revision", act.ChannelRevision+1).Error; res != nil {
return res
}
if res := tx.Model(&topicSlot).Update("revision", act.ChannelRevision+1).Error; res != nil {
return res
}
if res := tx.Model(&channelSlot.Channel).Update("topic_revision", act.ChannelRevision+1).Error; res != nil {
return res
}
if res := tx.Model(&channelSlot).Update("revision", act.ChannelRevision+1).Error; res != nil {
return res
}
if res := tx.Model(act).Update("channel_revision", act.ChannelRevision+1).Error; res != nil {
return res
}
return nil
})
if err != nil {
ErrResponse(w, http.StatusInternalServerError, err)
return
}
WriteResponse(w, &Asset{AssetID: asset.AssetID, Transform: "_", Status: APPAssetReady})
}

View File

@@ -1,5 +1,8 @@ package databag
package databag
//APPTransformCopy reserved transform code indicating copy
const APPTransformCopy = "_"
//APPTokenSize config for size of random access token
const APPTokenSize = 16

View File

@@ -545,6 +545,13 @@ var endpoints = routes{
AddChannel,
},
route{
"AddChannelTopicBlock",
strings.ToUpper("Post"),
"/content/channels/{channelID}/topics/{topicID}/blocks",
AddChannelTopicBlock,
},
route{
"AddChannelTopicAsset",
strings.ToUpper("Post"),

View File

@@ -2,7 +2,7 @@ import { useState, useRef } from 'react';
import axios from 'axios';
import Resizer from "react-image-file-resizer";
const ENCRYPTED_BLOCK_SIZE = (128 * 1024); //110k
const ENCRYPTED_BLOCK_SIZE = (1024 * 1024);
export function useUploadContext() {
@@ -150,17 +150,18 @@ export function useUploadContext() {
return { state, actions }
}
function getImageThumb(url) {
function getImageThumb(data) {
return new Promise(resolve => {
Resizer.imageFileResizer(url, 192, 192, 'JPEG', 50, 0,
Resizer.imageFileResizer(data, 192, 192, 'JPEG', 50, 0,
uri => {
resolve(uri);
}, 'base64', 128, 128 );
});
}
function getVideoThumb(url, pos) {
function getVideoThumb(data, pos) {
return new Promise((resolve, reject) => {
const url = URL.createObjectURL(data);
var video = document.createElement("video");
var timeupdate = function (ev) {
video.removeEventListener("timeupdate", timeupdate);
@@ -180,6 +181,7 @@ function getVideoThumb(url, pos) {
resolve(image);
canvas.remove();
video.remove();
URL.revokeObjectURL(url);
}, 1000);
};
video.addEventListener("timeupdate", timeupdate);
@@ -192,13 +194,13 @@ function getVideoThumb(url, pos) {
});
}
async function getThumb(url, type, position) {
async function getThumb(data, type, position) {
if (type === 'image') {
return await getImageThumb(url);
return await getImageThumb(data);
}
else if (type === 'video') {
return await getVideoThumb(url, position);
return await getVideoThumb(data, position);
}
else {
return null;
@@ -216,22 +218,25 @@ async function upload(entry, update, complete) {
try {
if (file.encrypted) {
const { size, getEncryptedBlock, position, image, video, audio } = file;
const { url, type } = image ? { url: image, type: 'image' } : video ? { url: video, type: 'video' } : audio ? { url: audio, type: 'audio' } : {}
const thumb = await getThumb(url, type, position);
const { data, type } = image ? { data: image, type: 'image' } : video ? { data: video, type: 'video' } : audio ? { data: audio, type: 'audio' } : {}
const thumb = await getThumb(data, type, position);
const parts = [];
for (let pos = 0; pos < size; pos += ENCRYPTED_BLOCK_SIZE) {
const len = pos + ENCRYPTED_BLOCK_SIZE > size ? size - pos : ENCRYPTED_BLOCK_SIZE;
const { blockEncrypted, blockIv } = await getEncryptedBlock(pos, len);
const partId = await axios.post(`${entry.baseUrl}block${entry.urlParams}`, blockEncrypted, {
const part = await axios.post(`${entry.baseUrl}blocks${entry.urlParams}`, blockEncrypted, {
headers: {'Content-Type': 'text/plain'},
signal: entry.cancel.signal,
onUploadProgress: (ev) => {
const { loaded, total } = ev;
const partLoaded = pos + Math.floor(blockEncrypted.length * loaded / total);
entry.active = { partLoaded, size }
const partLoaded = pos + Math.floor(len * loaded / total);
entry.active = { loaded: partLoaded, total: size }
update();
}
});
parts.push({ blockIv, partId });
parts.push({ blockIv, partId: part.data.assetId });
}
entry.assets.push({
encrypted: { type, thumb, parts }

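Once every block is uploaded, the encrypted asset is described entirely by client-side metadata. Roughly the shape pushed onto entry.assets above, with illustrative values; a reader of the topic can later retrieve each partId through the assets endpoint and decrypt it with the matching blockIv:

// illustrative shape only; field names follow the code above
const exampleAsset = {
  encrypted: {
    type: 'video',                       // 'image', 'video', or 'audio'
    thumb: '<thumbnail from getThumb>',
    parts: [
      { blockIv: '<iv for block 0>', partId: '<assetId returned by the blocks endpoint>' },
      { blockIv: '<iv for block 1>', partId: '<assetId returned by the blocks endpoint>' },
    ],
  },
};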
View File

@@ -91,7 +91,8 @@ export function useAddTopic(contentKey) {
if (pos + len > buffer.byteLength) {
return null;
}
const block = arrayBufferToBase64(buffer.slice(pos, len));
const slice = buffer.slice(pos, pos + len);
const block = arrayBufferToBase64(slice);
return encryptBlock(block, contentKey);
}
return { url, encrypted: true, size: buffer.byteLength, getEncryptedBlock };
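
The change above fixes the slice bounds: ArrayBuffer.slice takes an end offset, not a length, so the old call returned an empty buffer for every block after the first. A small worked example:

// old: buffer.slice(pos, len)       — for pos = 1048576, len = 1048576 this is slice(1048576, 1048576), an empty result
// new: buffer.slice(pos, pos + len) — slice(1048576, 2097152), the intended second 1 MiB block
const buffer = new Uint8Array([10, 20, 30, 40]).buffer;
const pos = 2, len = 2;
new Uint8Array(buffer.slice(pos, len));        // Uint8Array []       (wrong: len was treated as an end offset)
new Uint8Array(buffer.slice(pos, pos + len));  // Uint8Array [30, 40] (correct)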