For Any User Who Wishes to Have a Local Copy of Their Data

Hello again,

Trying to export all of your data off of CDO can be quite the hassle, especially if you want to migrate the data to another project by remixing it, only to have the remix fail because none of the data carries over.

const link = location.href.match(new RegExp(`(?<=(applab|gamelab)/)[^/]+|^[^/]+(?!/)$`));
const id = link[0];
const path = `/datablock_storage/${id}/`;
const storage = { keys: {}, tables: {} };

(async function Main(type) {
    switch (type) {
        case "applab":
            await getTables();
        case "gamelab":
            await getKeyValues();
            const anchor = document.createElement("a");
            anchor.href = URL.createObjectURL(new Blob([JSON.stringify(storage)], {type: "text/json"}));
            anchor.download = `${type}_${id}_storage.json`;
            document.body.appendChild(anchor);
            anchor.click();
            document.removeChild(anchor);
            break;
        default:
            throw "storage medium not supported";
    }
})(link[1])


// Stores the project's key/value pairs, stringifying each value for export
async function getKeyValues() {
    try {
        storage.keys = (await (await fetch(path + "get_key_values")).json());
        for(let k in storage.keys) {
            storage.keys[k] = JSON.stringify(storage.keys[k])
        }
    } catch (err) {
        throw "unable to complete request: " + err;
    }
}

// Stores every table in the project along with its records
async function getTables() {
    let tableNames = (await (await fetch(path + "get_table_names")).json());
    for (let name of tableNames) {
        try {
            storage.tables[name] = (await (await fetch(path + "read_records?table_name=" + name)).json());
        } catch (err) {
            throw `unable to append table "${name}" with code: ${err}`
        }
    }
}

All you need to do is be on the project you want to export → inject the code → hit enter → receive all of the project data as a JSON file for later use (a rough sample of the output is below). That's all I have for now. If there is interest in being able to migrate data between projects, do let me know and I may work on it.
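For reference, the exported file ends up looking roughly like this. The key names, table names, and records below are made up, and the exact shape of each record depends on what read_records returns for your project; note that every key value is saved as a JSON string because of the JSON.stringify step in getKeyValues:

{
    "keys": {
        "highScore": "42",
        "settings": "{\"sound\":true,\"volume\":0.8}"
    },
    "tables": {
        "players": [
            { "id": 1, "name": "alice", "score": 42 }
        ]
    }
}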

For now though, I think this will do.

Best wishes,

Varrience

Here's an update which allows importing, transferring, and downloading data between projects (the main reason I decided to polish it further is that data does not persist when a project is remixed). A quick console usage sketch follows the code.

/*
ALLOWS IMPORTING OF DATA AFTER BEING EXPORTED FROM CDO
YOU MUST BE SIGNED IN!!

USE CASES FOR THIS SPECIFIC MODULE
    MAIN
        IF YOU HAVE DOWNLOADED A PROJECT AND WISH TO USE THE DATA IN THIS ONE IT SHOULD LOOK LIKE THIS
        main(`// YOUR DATA HERE \\` || OBJECT, BOOLEAN: REMOVE EXISTING DATA?)
        IF YOU USE MY DOWNLOAD IT WILL BE THE FIRST METHOD
        IT DOES NOT GENERATE A VALID OBJECT!
    
    TRANSFER
        NEED TO MOVE YOUR PROJECT BECAUSE YOU LOST YOUR OLD ACCOUNT BUT CAN'T LOAD IN THE DATA? [THIS FEATURE IS FOR YOU]
        YOU NEED TO BE IN YOUR PROJECT AND THEN RUN THE TRANSFER COMMAND LIKE THIS
        transfer("PROJECT ID", BOOLEAN: REMOVE EXISTING DATA?)
        WIPING ALL THE DATA FROM YOUR PROJECT ENSURES THAT THE POPULATE CALLS CAN WRITE IN THE DATA YOU NEED
    
    DOWNLOAD
        THIS IS MAINLY FOR PEOPLE WHO WANT TO SELF HOST OR HAVE A LOCAL BACKUP OF THE PROJECT DATA THEY NEED OR TO TRANSFER TO WK
        THIS IS THE FORMAT ACCEPTED BY THE MAIN FUNCTION FOR IMPORTING THE DATA FROM A LOCAL FILE
        THIS IS HOW YOU USE IT
        download(id?)
        YOU CAN SPECIFY A DIFFERENT PROJECT IF YOU LIKE; THE DEFAULT IS THE ONE YOU'RE CURRENTLY USING
*/

const link = location.href.match(new RegExp(`(?<=(applab|gamelab)/)[^/]+|^[^/]+(?!/)$`));
const id = link[0];
const type = link[1];
const path = `/datablock_storage/`;
const token = document.getElementsByName("csrf-token")[0].content;
let storage;

// Imports a local dataset from a json source
async function main(data, deleteData) {
    const dataset = typeof data === "string" ? JSON.parse(data) : Object(data);
    if (deleteData) { await deleteOriginalData() }
    switch (type) {
        case "applab":
            if (dataset.tables) await populateTables(dataset.tables);
        case "gamelab":
            if (dataset.keys) await populateKeyValues(dataset.keys, true);
            break;
    }
}

// Transfers data from another project to this one
async function transfer(newId, deleteData) {
    storage = { keys: {}, tables: {} };
    if (deleteData) { await deleteOriginalData() }
    // stringify the key values so the dataset matches the export format main() expects
    await getKeyValues(newId, true);
    await getTables(newId);
    await main(storage);
}

// Downloads the current projects dataset in json
async function download(cid) {
    storage = { keys: {}, tables: {} };
    switch (type) {
        case "applab":
            await getTables(cid);
        case "gamelab":
            await getKeyValues(cid);
            const anchor = document.createElement("a");
            anchor.href = URL.createObjectURL(new Blob([JSON.stringify(storage)], { type: "text/json" }));
            anchor.download = `${type}_${id}_storage.json`;
            document.body.appendChild(anchor);
            anchor.click();
            document.body.removeChild(anchor);
            break;
        default:
            throw "storage medium not supported";
    }
}

// Populates keyvalues of a given project
async function populateKeyValues(keys, isImport) {
    if (isImport) {
        for(let k in keys) {
            keys[k] = JSON.parse(keys[k]);
        }
    }
    await fetch(genURL() + "populate_key_values", {
        "credentials": "include",
        "headers": {
            "Content-Type": "application/json",
            "X-CSRF-Token": token
        },
        body: JSON.stringify({ key_values_json: JSON.stringify(keys) }),
        "method": "PUT",
        "mode": "cors"
    });
}

// Populates tables of a given project
async function populateTables(tables) {
    await fetch(genURL() + "populate_tables", {
        "credentials": "include",
        "headers": {
            "Content-Type": "application/json",
            "X-CSRF-Token": token
        },
        "body": JSON.stringify({ tables_json: JSON.stringify(tables) }),
        "method": "PUT",
        "mode": "cors"
    });
}

// Stores current key values of a given project
async function getKeyValues(cid, isExport) {
    try {
        storage.keys = (await (await fetch(genURL(cid) + "get_key_values")).json());
        if (isExport) {
            for (let k in storage.keys) {
                storage.keys[k] = JSON.stringify(storage.keys[k])
            }
        }
    } catch (err) {
        throw "unable to complete request: " + err;
    }
}

// Stores current tables of a given project
async function getTables(cid) {
    let baseURL = genURL(cid);
    let tableNames = (await (await fetch(baseURL + "get_table_names")).json());
    for (let name of tableNames) {
        try {
            storage.tables[name] = (await (await fetch(baseURL + "read_records?table_name=" + name)).json());
        } catch (err) {
            throw `unable to append table "${name}" with code: ${err}`
        }
    }
}

// Allows overwriting data, since populating data won't overwrite what's already there
async function deleteOriginalData() {
    await fetch(genURL() + "clear_all_data", {
        "credentials": "include",
        "headers": {
            "Content-Type": "application/json",
            "X-CSRF-Token": token
        },
        "body": "{}",
        "method": "DELETE",
        "mode": "cors"
    })
}

// lazy relative path helper: uses the given project id, or defaults to the current one
function genURL(cid) { return path + (cid || id) + "/" }
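If you'd rather not dig through the comment block, here's roughly how I'd call it from the browser console once the script above is injected. The project id and the dataset below are made up, so swap in your own:

// 1. Import a previously downloaded file into the current project, wiping its existing data first
main(`{"keys":{"highScore":"42"},"tables":{"players":[{"name":"alice","score":42}]}}`, true);

// 2. Pull everything from another project into this one without deleting anything here
transfer("AbC123dEfG", false);

// 3. Save a local backup of the current project's data
download();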