working v1
فشلت بعض الفحوصات
Release Please / release-please (push) Has been cancelled
Docker Build and Push (Development) / build-and-push-dev (push) Has been cancelled

هذا الالتزام موجود في:
2025-11-17 17:19:47 +03:00
التزام 1fda64ec43
288 ملفات معدلة مع 27337 إضافات و0 حذوفات

عرض الملف

@@ -0,0 +1,56 @@
const mongoose = require("../backend/db/mongooseServerUtils");
const conn = mongoose.connection;
const prompts = require("prompts");
const createTempDirectory = require("./createTempDirectory");
const waitForDatabase = () => {
  // Resolves once the shared mongoose connection is open (readyState 1).
  // Already connected: resolve immediately; otherwise wait for "open".
  return new Promise((resolve) => {
    if (conn.readyState === 1) {
      resolve();
      return;
    }
    conn.once("open", resolve);
  });
};
const copyDatabase = async () => {
  // Interactive entry point: waits for Mongo, asks for confirmation, then
  // snapshots the live collections into the temp-* backup collections.
  console.log("Waiting For Database Connection...");
  await waitForDatabase();
  console.log("Connected To Database\n");
  const warningMessage = [
    "Warning: This will create a new Database backup, overwriting",
    "the current database backup. Only ONE Database backup",
    "can Be Stored At A Time.",
    "For more permanent backups, use MongoExport, or ",
    "Backup data manually. ",
    "Would you like to continue? (Yes/No)",
  ].join("\n");
  const userConfimation = await prompts({
    type: 'text',
    message: warningMessage,
    name: "value",
  });
  const answer = userConfimation.value ? userConfimation.value.toLowerCase() : "";
  if (answer !== "yes") {
    console.log("Exiting...");
    process.exit();
    return;
  }
  await createTempDirectory();
  console.log("Finished Copying Database, Exiting...");
  process.exit();
};
copyDatabase();

عرض الملف

@@ -0,0 +1,295 @@
const mongoose = require("../backend/db/mongooseServerUtils");
const conn = mongoose.connection;
const ObjectID = require('mongodb').ObjectID
const imageChecker = require("../src/utils/imageChecker");
const createThumbnail = require("./createThumbnailBuffer");
const prompts = require("prompts");
const getKey = require("../key/getKey");
const getNewKey = require("../key/getNewKey");
const crypto = require("crypto");
const env = require("../backend/enviroment/env");
const cliProgress = require('cli-progress');
const createTempDirectory = require("./createTempDirectory");
const waitForDatabase = () => {
  // Resolves once the shared mongoose connection is open (readyState 1).
  // Already connected: resolve immediately; otherwise wait for "open".
  return new Promise((resolve) => {
    if (conn.readyState === 1) {
      resolve();
      return;
    }
    conn.once("open", resolve);
  });
};
// Re-encrypts one GridFS file from the "temp-fs" backup bucket into the main
// "fs" bucket: the backed-up ciphertext is decrypted with the user's OLD key
// (recovered from the temp-users copy of the user document) and re-encrypted
// with `newKey`, re-using the file's original IV. Resolves when the upload
// finishes; `reject` is never called.
const reencryptFile = (file, newKey, user) => {
  // NOTE(review): async Promise executor — an exception thrown before the
  // streams are wired would be swallowed instead of rejecting the promise.
  return new Promise(async(resolve, reject) => {
    const fileID = file._id;
    const filename = file.filename;
    // Source bucket: the backup written by createTempDirectory.
    let decryptBucket = new mongoose.mongo.GridFSBucket(conn.db, {
      chunkSizeBytes: 1024 * 255,
      bucketName: "temp-fs"
    });
    // Destination: the default "fs" bucket.
    let bucket = new mongoose.mongo.GridFSBucket(conn.db, {
      chunkSizeBytes: 1024 * 255
    });
    const metadata = file.metadata;
    const readStream = decryptBucket.openDownloadStream(ObjectID(fileID));
    const writeStream = bucket.openUploadStream(filename, {metadata});
    // The OLD key must be derived from the pre-rekey user document.
    const foundOldUser = await conn.db.collection("temp-users").findOne({_id: user._id});
    const password = getOldEncryptionKey(foundOldUser);
    const IV = file.metadata.IV.buffer
    const CIPHER_KEY = crypto.createHash('sha256').update(password).digest()
    const decipher = crypto.createDecipheriv('aes256', CIPHER_KEY, IV);
    const NEW_CIPHER_KEY = crypto.createHash('sha256').update(newKey).digest()
    const cipher = crypto.createCipheriv('aes256', NEW_CIPHER_KEY, IV);
    // NOTE(review): only the cipher has an error handler (and it only logs);
    // readStream/decipher/writeStream errors would leave this promise
    // pending forever — confirm whether that is acceptable for this script.
    cipher.on("error", (e) => {
      console.log("de", e);
    })
    readStream.pipe(decipher).pipe(cipher).pipe(writeStream);
    writeStream.on("finish", async(newFile) => {
      // Rebuild the thumbnail for images smaller than 15 MiB (15728640 B).
      const imageCheck = imageChecker(filename);
      if (file.length < 15728640 && imageCheck) {
        try {
          await createThumbnail(newFile, filename, user, newKey);
        } catch (e) {
          console.log("Cannot create thumbnail", e);
        }
        resolve();
      } else {
        resolve();
      }
    })
  })
}
const findFiles = async () => {
  // Walks every user and re-encrypts each of their backed-up files with the
  // user's NEW encryption key, showing per-user progress.
  const usersCollection = conn.db.collection("users");
  const userCursor = await usersCollection.find({});
  const totalUsers = await usersCollection.find({}).count();
  const progressBar = new cliProgress.SingleBar({}, cliProgress.Presets.shades_classic);
  progressBar.start(totalUsers, 0);
  for await (const user of userCursor) {
    const newEncryptionKey = getEncryptionKey(user);
    const fileCursor = await conn.db
      .collection("temp-fs.files")
      .find({ "metadata.owner": ObjectID(user._id) });
    for await (const fileDoc of fileCursor) {
      await reencryptFile(fileDoc, newEncryptionKey, user);
    }
    progressBar.increment();
  }
  progressBar.stop();
};
const generateEncryptionKeys = async (user) => {
  // Creates a fresh random 32-byte file key for `user`, encrypts it first
  // with the user's password and then with the NEW master password, and
  // stores the doubly-wrapped key (privateKey) plus the IV (publicKey) on
  // the user document, which is mutated and returned.
  const randomKey = crypto.randomBytes(32);
  const iv = crypto.randomBytes(16);
  // Layer 1: wrap the random key with the user's password.
  const userKeyHash = crypto.createHash('sha256').update(user.password).digest();
  const userCipher = crypto.createCipheriv('aes-256-cbc', userKeyHash, iv);
  const userWrapped = Buffer.concat([userCipher.update(randomKey), userCipher.final()]);
  // Layer 2: wrap the result with the new master password (env.newKey).
  const masterKeyHash = crypto.createHash('sha256').update(env.newKey).digest();
  const masterCipher = crypto.createCipheriv('aes-256-cbc', masterKeyHash, iv);
  const masterWrapped = Buffer.concat([
    masterCipher.update(userWrapped),
    masterCipher.final(),
  ]).toString("hex");
  user.privateKey = masterWrapped;
  user.publicKey = iv.toString("hex");
  return user;
};
const getOldEncryptionKey = (user) => {
  // Reverses generateEncryptionKeys using the OLD master password (env.key):
  // peel off the master layer, then the user-password layer, and return the
  // raw file-encryption key as a Buffer.
  const iv = Buffer.from(user.publicKey, "hex");
  const wrappedKey = Buffer.from(user.privateKey, "hex");
  const masterKeyHash = crypto.createHash('sha256').update(env.key).digest();
  const userKeyHash = crypto.createHash('sha256').update(user.password).digest();
  const masterDecipher = crypto.createDecipheriv('aes-256-cbc', masterKeyHash, iv);
  const innerLayer = Buffer.concat([
    masterDecipher.update(wrappedKey),
    masterDecipher.final(),
  ]);
  const userDecipher = crypto.createDecipheriv('aes-256-cbc', userKeyHash, iv);
  return Buffer.concat([userDecipher.update(innerLayer), userDecipher.final()]);
};
const getEncryptionKey = (user) => {
  // Same unwrap as getOldEncryptionKey, but with the NEW master password
  // (env.newKey): peel off the master layer, then the user-password layer,
  // and return the raw file-encryption key as a Buffer.
  const iv = Buffer.from(user.publicKey, "hex");
  const wrappedKey = Buffer.from(user.privateKey, "hex");
  const masterKeyHash = crypto.createHash('sha256').update(env.newKey).digest();
  const userKeyHash = crypto.createHash('sha256').update(user.password).digest();
  const masterDecipher = crypto.createDecipheriv('aes-256-cbc', masterKeyHash, iv);
  const innerLayer = Buffer.concat([
    masterDecipher.update(wrappedKey),
    masterDecipher.final(),
  ]);
  const userDecipher = crypto.createDecipheriv('aes-256-cbc', userKeyHash, iv);
  return Buffer.concat([userDecipher.update(innerLayer), userDecipher.final()]);
};
const findUsers = async () => {
  // Copies every user from the "temp-users" backup into "users", generating
  // a fresh key pair (wrapped with the new master password) for each.
  const tempUsers = conn.db.collection("temp-users");
  const userCursor = await tempUsers.find({});
  const totalUsers = await tempUsers.find({}).count();
  const progressBar = new cliProgress.SingleBar({}, cliProgress.Presets.shades_classic);
  progressBar.start(totalUsers, 0);
  for await (const user of userCursor) {
    try {
      const rekeyedUser = await generateEncryptionKeys(user);
      await conn.db.collection("users").insertOne(rekeyedUser);
      progressBar.increment();
    } catch (e) {
      console.log("e", e);
    }
  }
  progressBar.stop();
};
const changeEncryptionPassword = async () => {
  // Interactive entry point: backs up the database, wipes the live
  // collections, regenerates every user's keys under the new master
  // password, and re-encrypts all files back into the main bucket.
  console.log("Waiting For Database...");
  await waitForDatabase();
  console.log("Connected To Database...\n");
  const userConfirmation = await prompts({
    type: 'text',
    message: "Warning: This will automatically run Backup-Database,\n" +
    "overwriting the current Backup. And will also clear all file chunks\n" +
    "other than the Data Backup. Then it will re-encrypt files and move them back over.\n" +
    "(Optional) Create a manual Backup for additional safety.\n" +
    "Would you like to continue? (Yes/No)",
    name: "value"
  });
  if (!userConfirmation.value || userConfirmation.value.toLowerCase() !== "yes") {
    console.log("Exiting...");
    process.exit();
    return;
  }
  console.log("\nGetting Old Password...");
  await getKey();
  console.log("Got Key\n");
  console.log("Getting New Password...");
  await getNewKey();
  console.log("Got New Key\n");
  console.log("Creating Temporary Collection...\n");
  await createTempDirectory();
  console.log("Temporary Collection Completed\n");
  // Fixed log typo: "Sucessfully" -> "Successfully".
  console.log("Created New Backup Successfully\n");
  // Drop the live collections. Each drop is best-effort: the collection may
  // not exist yet ("ns not found"), which is fine.
  // Fixed log typo: "Delete Current Users..." -> "Deleting Current Users...".
  const dropSteps = [
    ["fs.chunks", "Deleting Current Chunks Collection...", "Current Chunk Collection Deleted\n"],
    ["fs.files", "Deleting Current File Collection...", "Deleted Current File Collection\n"],
    ["users", "Deleting Current Users...", "Current Users Deleted\n"],
    ["thumbnails", "Deleting Current Thumbnails...", "Deleted Current Thumbnails\n"],
  ];
  for (const [collectionName, startMessage, doneMessage] of dropSteps) {
    console.log(startMessage);
    try {
      await conn.db.collection(collectionName).drop();
    } catch (e) {
      // Collection did not exist; nothing to drop.
    }
    console.log(doneMessage);
  }
  console.log("Generating User Encryption Keys...");
  await findUsers();
  console.log("Generated User Encryption Keys\n");
  console.log("Moving Files By User...");
  await findFiles();
  console.log("Moved All Files...\n");
  process.exit();
};
changeEncryptionPassword();

عرض الملف

@@ -0,0 +1,118 @@
const mongoose = require("../backend/db/mongooseServerUtils");
const conn = mongoose.connection;
const ObjectID = require('mongodb').ObjectID
const prompts = require("prompts");
const cliProgress = require('cli-progress');
const createTempDirectory = require("./createTempDirectory");
const waitForDatabase = () => {
  // Resolves once the shared mongoose connection is open (readyState 1).
  // Already connected: resolve immediately; otherwise wait for "open".
  return new Promise((resolve) => {
    if (conn.readyState === 1) {
      resolve();
      return;
    }
    conn.once("open", resolve);
  });
};
const moveFileChunks = async (fileID, oldDatabaseChunks, newDatabaseChunks) => {
  // Copies every chunk belonging to `fileID` from the old chunks collection
  // into the new one, one document at a time.
  const chunkCursor = await conn.db
    .collection(oldDatabaseChunks)
    .find({ files_id: ObjectID(fileID) });
  for await (const chunk of chunkCursor) {
    await conn.db.collection(newDatabaseChunks).insertOne(chunk);
  }
};
const findFiles = async (oldDatabaseList, oldDatabaseChunks, newDatabaseChunks) => {
  // For every file document in `oldDatabaseList`, copies that file's chunks
  // from the old chunk collection into the new one, showing progress.
  const fileCollection = conn.db.collection(oldDatabaseList);
  const fileCursor = await fileCollection.find({});
  const fileCount = await fileCollection.find({}).count();
  const progressBar = new cliProgress.SingleBar({}, cliProgress.Presets.shades_classic);
  progressBar.start(fileCount, 0);
  for await (const fileDoc of fileCursor) {
    await moveFileChunks(fileDoc._id, oldDatabaseChunks, newDatabaseChunks);
    progressBar.increment();
  }
  progressBar.stop();
};
const cleanDatabase = async () => {
  // Interactive entry point: backs up the database, then rebuilds the main
  // chunk collections keeping only chunks still referenced by a file doc.
  console.log("Waiting For Database Connection...");
  await waitForDatabase();
  console.log("Connected To Database\n");
  const userConfirmation = await prompts({
    type: 'text',
    message: "Warning: This will automatically run Backup-Database,\n" +
    "overwriting the current Backup. And will also clear all file chunks\n" +
    "other than the Data Backup. Then it will move only used file chunks\n" +
    "over to the Main Database. If this process fails AFTER the Automatic Backup\n" +
    "use the Restore-Database feature. \n" +
    "Would you like to continue? (Yes/No)",
    name: "value"
  });
  if (!userConfirmation.value || userConfirmation.value.toLowerCase() !== "yes") {
    console.log("Exiting...");
    process.exit();
    return;
  }
  console.log("Creating Temporary Collection...\n");
  await createTempDirectory();
  console.log("Temporary Collection Completed\n");
  // Fixed log typo: "Sucessfully" -> "Successfully".
  console.log("Created New Backup Successfully\n");
  console.log("Deleting Current Chunks Collection...");
  try {
    await conn.db.collection("fs.chunks").drop();
  } catch (e) {
    // Collection did not exist; nothing to drop.
  }
  console.log("Current Chunk Collection Deleted\n");
  console.log("Moving Used Files...");
  await findFiles("temp-fs.files", "temp-fs.chunks", "fs.chunks");
  console.log("Moved All Used Files\n");
  console.log("Creating File Chunks Index...");
  await conn.db.collection("fs.chunks").createIndex({ files_id: 1, n: 1 }, { unique: true });
  console.log("Created File Chunks Index\n");
  // Fixed log typo: "Deleteing" -> "Deleting".
  console.log("Deleting Current Transcoded Video Chunks Collection...");
  try {
    await conn.db.collection("videos.chunks").drop();
  } catch (e) {
    // Collection did not exist; nothing to drop.
  }
  console.log("Deleted Current Transcoded Video Chunks Collection\n");
  console.log("Moving Used Video Files...");
  await findFiles("temp-videos.files", "temp-videos.chunks", "videos.chunks");
  console.log("Moved All Used Video Files\n");
  console.log("Creating Transcoded Video Chunks Index...");
  await conn.db.collection("videos.chunks").createIndex({ files_id: 1, n: 1 }, { unique: true });
  console.log("Created Transcoded Video Chunks Index");
  process.exit();
};
cleanDatabase();

عرض الملف

@@ -0,0 +1,52 @@
const getEnvVariables = require("../dist/enviroment/getEnvVariables");
getEnvVariables();
const mongoose = require("./mongoServerUtil");
const conn = mongoose.connection;
const waitForDatabase = () => {
  // Resolves once the shared mongoose connection is open (readyState 1).
  // Already connected: resolve immediately; otherwise wait for "open".
  return new Promise((resolve) => {
    if (conn.readyState === 1) {
      resolve();
      return;
    }
    conn.once("open", resolve);
  });
};
const createIndexes = async () => {
  // Builds the query indexes used by the app on files, folders and
  // thumbnails.
  console.log("Waiting For Database...");
  await waitForDatabase();
  console.log("Connected To Database\n");
  console.log("Creating Indexes...");
  // NOTE(review): MongoDB can traverse a single-field index in either
  // direction, so the paired 1/-1 specs may be redundant — confirm.
  const indexSpecs = [
    ["fs.files", { uploadDate: 1 }],
    ["fs.files", { uploadDate: -1 }],
    ["fs.files", { filename: 1 }],
    ["fs.files", { filename: -1 }],
    ["fs.files", { "metadata.owner": 1 }],
    ["folders", { createdAt: 1 }],
    ["folders", { createdAt: -1 }],
    ["folders", { name: 1 }],
    ["folders", { name: -1 }],
    ["folders", { owner: 1 }],
    ["thumbnails", { owner: 1 }],
  ];
  for (const [collectionName, keys] of indexSpecs) {
    await conn.db.collection(collectionName).createIndex(keys);
  }
  console.log("Indexes Created");
  process.exit();
};
createIndexes();

عرض الملف

@@ -0,0 +1,103 @@
const mongoose = require("../backend/db/mongooseServerUtils");
const conn = mongoose.connection;
const cliProgress = require('cli-progress');
const clearTempDirectory = async () => {
  // Drops every temp-* backup collection. Each drop is best-effort because
  // the collection may not exist on a fresh database.
  console.log("Removing Temporary Collections...");
  const tempCollections = [
    "temp-fs.files",
    "temp-fs.chunks",
    "temp-thumbnails",
    "temp-folders",
    "temp-videos.files",
    "temp-videos.chunks",
    "temp-users",
  ];
  for (const collectionName of tempCollections) {
    try {
      await conn.db.collection(collectionName).drop();
    } catch (e) {
      // Collection did not exist; nothing to drop.
    }
  }
  console.log("Removed Temporary Collections\n");
};
const moveItem = async (oldPath, newPath) => {
  // Copies every document from the `oldPath` collection into `newPath`,
  // showing a progress bar.
  const sourceCollection = conn.db.collection(oldPath);
  const documentCursor = await sourceCollection.find({});
  const documentCount = await sourceCollection.find({}).count();
  const progressBar = new cliProgress.SingleBar({}, cliProgress.Presets.shades_classic);
  progressBar.start(documentCount, 0);
  for await (const doc of documentCursor) {
    await conn.db.collection(newPath).insertOne(doc);
    progressBar.increment();
  }
  progressBar.stop();
};
const createTempDirectory = async () => {
  // Snapshots every live collection into its temp-* backup counterpart,
  // clearing any previous backup first.
  await clearTempDirectory();
  console.log("Moving Files...");
  await moveItem("fs.files", "temp-fs.files");
  console.log("Moved All Files\n");
  console.log(`Moving File Chunks...`);
  await moveItem("fs.chunks", "temp-fs.chunks");
  console.log("Moved All Chunks \n");
  console.log("Creating Temp File Chunks Index...");
  await conn.db.collection("temp-fs.chunks").createIndex({ files_id: 1, n: 1 }, { unique: true });
  console.log("Created Temp File Chunks Index\n");
  console.log(`Moving Thumbnails...`);
  await moveItem("thumbnails", "temp-thumbnails");
  console.log("Moved All Thumbnails \n");
  console.log(`Moving Folders...`);
  await moveItem("folders", "temp-folders");
  console.log("All Folders Moved \n");
  console.log(`Moving Transcoded Video Files...`);
  await moveItem("videos.files", "temp-videos.files");
  console.log("All Transcoded Video Files Moved \n");
  console.log(`Moving Transcoded Video Chunks...`);
  await moveItem("videos.chunks", "temp-videos.chunks");
  console.log("All Transcoded Video Chunks Moved \n");
  console.log("Creating Temp Transcoded Video Chunks Index...");
  // BUGFIX: the index was being created on the live "videos.chunks"
  // collection. The log text and the temp-fs.chunks step above show it was
  // meant for the backup collection "temp-videos.chunks".
  await conn.db.collection("temp-videos.chunks").createIndex({ files_id: 1, n: 1 }, { unique: true });
  console.log("Created Temp Transcoded Video Chunks Index \n");
  console.log(`Moving Users...`);
  await moveItem("users", "temp-users");
  console.log("All Users Moved\n");
};
module.exports = createTempDirectory;

عرض الملف

@@ -0,0 +1,82 @@
import mongoose from "../backend/db/mongoose";
const conn = mongoose.connection;
const crypto= require("crypto");
import env from "../backend/enviroment/env";
const Thumbnail = require("../backend/models/thumbnail");
const ObjectID = require('mongodb').ObjectID
const sharp = require("sharp");
const concat = require("concat-stream")
// Generates an encrypted 300px-wide thumbnail for an image file: downloads
// and decrypts the file from GridFS, resizes it with sharp, encrypts the
// result with a fresh IV (prepended to the ciphertext), saves it as a
// Thumbnail document, and marks the file document with
// hasThumbnail/thumbnailID. Always resolves: with the updated file document
// on success, or the original `file` on any error.
const createThumbnail = async(file, filename, user, newKey) => {
  return new Promise((resolve) => {
    try {
      const password = newKey;
      let CIPHER_KEY = crypto.createHash('sha256').update(password).digest()
      let bucket = new mongoose.mongo.GridFSBucket(conn.db, {
        chunkSizeBytes: 1024 * 255,
      })
      const readStream = bucket.openDownloadStream(ObjectID(file._id))
      readStream.on("error", (e) => {
        console.log("File service upload thumbnail error", e);
        resolve(file);
      })
      // Decrypt the stored ciphertext using the file's original IV.
      const decipher = crypto.createDecipheriv('aes256', CIPHER_KEY, file.metadata.IV.buffer);
      decipher.on("error", (e) => {
        console.log("File service upload thumbnail decipher error", e);
        resolve(file)
      })
      const concatStream = concat(async(bufferData) => {
        // Re-encrypt the resized image with a new random IV; the IV is
        // stored as the first 16 bytes of the thumbnail data.
        const thumbnailIV = crypto.randomBytes(16);
        const thumbnailCipher = crypto.createCipheriv("aes256", CIPHER_KEY, thumbnailIV);
        bufferData = Buffer.concat([thumbnailIV, thumbnailCipher.update(bufferData), thumbnailCipher.final()]);
        const thumbnailModel = new Thumbnail({name: filename, owner: user._id, data: bufferData});
        await thumbnailModel.save();
        // Flag the file document, then return a copy that already reflects
        // the update (findOneAndUpdate returns the pre-update document).
        let updatedFile = await conn.db.collection("fs.files")
        .findOneAndUpdate({"_id": file._id}, {"$set": {"metadata.hasThumbnail": true, "metadata.thumbnailID": thumbnailModel._id}})
        updatedFile = updatedFile.value;
        updatedFile = {...updatedFile, metadata: {...updatedFile.metadata, hasThumbnail: true, thumbnailID: thumbnailModel._id}}
        resolve(updatedFile);
      }).on("error", (e) => {
        console.log("File service upload concat stream error", e);
        resolve(file);
      })
      const imageResize = sharp().resize(300).on("error", (e) => {
        console.log("resize error", e);
        resolve(file);
      })
      readStream.pipe(decipher).pipe(imageResize).pipe(concatStream);
    } catch (e) {
      console.log(e);
      resolve(file);
    }
  })
}
module.exports = createThumbnail;

عرض الملف

@@ -0,0 +1,79 @@
const getEnvVariables = require("../dist-backend/enviroment/get-env-variables");
getEnvVariables();
const mongoose = require("./mongoServerUtil");
const conn = mongoose.connection;
const File = require("../dist-backend/models/file-model");
const User = require("../dist-backend/models/user-model");
const createVideoThumbnail =
require("../dist-backend/services/chunk-service/utils/createVideoThumbnail").default;
const getKey = require("../dist-backend/key/get-key").default;
const waitForDatabase = () => {
  // Resolves 3 seconds after the mongoose connection is open (readyState 1),
  // giving the driver time to settle before queries run.
  return new Promise((resolve) => {
    const settle = () => setTimeout(resolve, 3000);
    if (conn.readyState === 1) {
      settle();
    } else {
      conn.once("open", settle);
    }
  });
};
// Wait to be after anything else may be printed to the terminal
const terminalWait = () => {
return new Promise((resolve) => {
setTimeout(() => resolve(), 2000);
});
};
const updateDocs = async () => {
  // Backfills video thumbnails: finds video files whose thumbnailID is the
  // empty string and generates a thumbnail for each one.
  await terminalWait();
  console.log(`Updating video thumbnails, env is ${process.env.NODE_ENV}`);
  console.log("\nWaiting for database...");
  await waitForDatabase();
  console.log("Connected to database\n");
  console.log("Getting Key...");
  await getKey();
  console.log("Key Got\n");
  console.log("Getting file list...");
  const videoExtensionPattern =
    /\.(mp4|mov|avi|mkv|webm|wmv|flv|mpg|mpeg|3gp|3g2|mxf|ogv|ogg|m4v)$/i;
  const files = await File.find({
    filename: { $regex: videoExtensionPattern },
    "metadata.thumbnailID": "",
  });
  console.log("Found", files.length, "files");
  for (let index = 0; index < files.length; index++) {
    const currentFile = files[index];
    try {
      console.log(`Progress ${index + 1}/${files.length}`);
      const owner = await User.findById(currentFile.metadata.owner);
      await createVideoThumbnail(currentFile, currentFile.filename, owner);
    } catch (e) {
      // Keep going: one bad file should not abort the whole backfill.
      console.log("error creating video thumbnail", e);
    }
  }
  console.log("Done");
  process.exit();
};
updateDocs();

عرض الملف

@@ -0,0 +1,81 @@
const mongoose = require("../backend/db/mongooseServerUtils");
const prompts = require("prompts");
const conn = mongoose.connection;
const waitForDatabase = () => {
  // Resolves once the shared mongoose connection is open (readyState 1).
  // Already connected: resolve immediately; otherwise wait for "open".
  return new Promise((resolve) => {
    if (conn.readyState === 1) {
      resolve();
      return;
    }
    conn.once("open", resolve);
  });
};
// NOTE(review): despite the name, this drops the MAIN database collections,
// not the temp backup — confirm the name against the sibling backup-deleting
// script.
const deleteTempDatabase = async () => {
  console.log("Waiting For Database...");
  await waitForDatabase();
  console.log("Connected To Database\n");
  const userConfimation = await prompts({
    type: 'text',
    message: [
      "Warning: This will delete all the data in the Main Database,",
      "this will not delete any data in the Database Backup.",
      "Would you like to continue? (Yes/No)",
    ].join("\n"),
    name: "value"
  });
  const answer = userConfimation.value ? userConfimation.value.toLowerCase() : "";
  if (answer !== "yes") {
    console.log("Exiting...");
    process.exit();
    return;
  }
  console.log("Removing Collections...");
  const mainCollections = [
    "fs.files",
    "fs.chunks",
    "thumbnails",
    "folders",
    "videos.files",
    "videos.chunks",
    "users",
  ];
  for (const collectionName of mainCollections) {
    try {
      await conn.db.collection(collectionName).drop();
    } catch (e) {
      // Collection did not exist; nothing to drop.
    }
  }
  console.log("Removed Collections\n");
  process.exit();
};
deleteTempDatabase();

عرض الملف

@@ -0,0 +1,79 @@
const mongoose = require("../backend/db/mongooseServerUtils");
const prompts = require("prompts");
const conn = mongoose.connection;
const waitForDatabase = () => {
  // Resolves once the shared mongoose connection is open (readyState 1).
  // Already connected: resolve immediately; otherwise wait for "open".
  return new Promise((resolve) => {
    if (conn.readyState === 1) {
      resolve();
      return;
    }
    conn.once("open", resolve);
  });
};
// Deletes the temp-* backup collections after an explicit confirmation.
const deleteTempDatabase = async () => {
  console.log("Waiting For Database...");
  await waitForDatabase();
  console.log("Connected To Database\n");
  const userConfimation = await prompts({
    type: 'text',
    message: [
      "Warning: Deleting the Backup Database cannot be undone,",
      "Would you like to continue? (Yes/No)",
    ].join("\n"),
    name: "value"
  });
  const answer = userConfimation.value ? userConfimation.value.toLowerCase() : "";
  if (answer !== "yes") {
    console.log("Exiting...");
    process.exit();
    return;
  }
  console.log("Removing Temporary Collections...");
  const tempCollections = [
    "temp-fs.files",
    "temp-fs.chunks",
    "temp-thumbnails",
    "temp-folders",
    "temp-videos.files",
    "temp-videos.chunks",
    "temp-users",
  ];
  for (const collectionName of tempCollections) {
    try {
      await conn.db.collection(collectionName).drop();
    } catch (e) {
      // Collection did not exist; nothing to drop.
    }
  }
  console.log("Removed Temporary Collections, Exiting...");
  process.exit();
};
deleteTempDatabase();

عرض الملف

@@ -0,0 +1,10 @@
const path = require("path");
// Loads production environment variables from <repo>/config/prod.env into
// process.env via dotenv.
const getEnvVariables = () => {
  // Use path.join for the final segment too (was "configPath + \"/prod.env\"")
  // so separator handling stays platform-correct throughout.
  const envFilePath = path.join(__dirname, "..", "config", "prod.env");
  require('dotenv').config({ path: envFilePath });
};
module.exports = getEnvVariables;

عرض الملف

@@ -0,0 +1,47 @@
const getEnvVariables = require("../dist-backend/enviroment/get-env-variables");
getEnvVariables();
const mongoose = require("./mongoServerUtil");
const conn = mongoose.connection;
const File = require("../dist-backend/models/file-model");
const waitForDatabase = () => {
  // Resolves once the shared mongoose connection is open (readyState 1).
  // Already connected: resolve immediately; otherwise wait for "open".
  return new Promise((resolve) => {
    if (conn.readyState === 1) {
      resolve();
      return;
    }
    conn.once("open", resolve);
  });
};
const updateDocs = async () => {
  // Normalizes file metadata: rewrites metadata.owner and
  // metadata.thumbnailID from ObjectIDs to plain strings.
  console.log("\nWaiting for database...");
  await waitForDatabase();
  console.log("Connected to database\n");
  console.log("Getting file list...");
  const files = await File.find({});
  console.log("Found", files.length, "files");
  for (let i = 0; i < files.length; i++) {
    const currentFile = files[i];
    // Build $set only from fields that are present: previously a file with
    // no thumbnailID (or no owner) crashed the whole run on .toString().
    const fieldsToSet = {};
    if (currentFile.metadata.owner != null) {
      fieldsToSet["metadata.owner"] = currentFile.metadata.owner.toString();
    }
    if (currentFile.metadata.thumbnailID != null) {
      fieldsToSet["metadata.thumbnailID"] = currentFile.metadata.thumbnailID.toString();
    }
    if (Object.keys(fieldsToSet).length !== 0) {
      await File.updateOne({ _id: currentFile._id }, { $set: fieldsToSet });
    }
  }
  console.log("Done");
  process.exit();
};
updateDocs();

عرض الملف

@@ -0,0 +1,5 @@
// Shared mongoose connection for the maintenance scripts: connects using the
// MONGODB_URL environment variable and re-exports the mongoose module so
// callers share one connection.
const mongoose = require("mongoose");
mongoose.connect(process.env.MONGODB_URL, {});
module.exports = mongoose;

عرض الملف

@@ -0,0 +1,79 @@
const getEnvVariables = require("../dist/enviroment/getEnvVariables");
getEnvVariables()
const mongoose = require("./mongoServerUtil");
const conn = mongoose.connection;
const Thumbnail = require("../dist/models/thumbnail");
const File = require("../dist/models/file");
const User = require('../dist/models/user');
const DAY_LIMIT = 0;
const waitForDatabase = () => {
  // Resolves once the shared mongoose connection is open (readyState 1).
  // Already connected: resolve immediately; otherwise wait for "open".
  return new Promise((resolve) => {
    if (conn.readyState === 1) {
      resolve();
      return;
    }
    conn.once("open", resolve);
  });
};
const removePersonalMetadata = async (user) => {
  // Deletes metadata for a user's personal-storage items: the file
  // documents, their thumbnails, and personal folders.
  const personalFiles = await conn.db.collection("fs.files").find({
    "metadata.owner": user._id,
    "metadata.personalFile": true,
  }).toArray();
  for (const personalFile of personalFiles) {
    await File.deleteOne({ _id: personalFile._id });
    if (personalFile.metadata.hasThumbnail) {
      await Thumbnail.deleteOne({ _id: personalFile.metadata.thumbnailID });
    }
  }
  await conn.db
    .collection("folders")
    .deleteMany({ 'owner': user._id.toString(), 'personalFolder': true });
};
const removeOldPeronsalData = async () => {
  // Finds users whose personal storage was canceled at least DAY_LIMIT days
  // ago and removes their personal metadata.
  console.log("Waiting for mongoDB Database...");
  await waitForDatabase();
  console.log("MongoDB Connection established\n");
  const canceledUsers = await User.find({ 'personalStorageCanceledDate': { $exists: true } });
  console.log('user list', canceledUsers.length);
  for (const user of canceledUsers) {
    const expireDate = new Date(user.personalStorageCanceledDate);
    expireDate.setDate(expireDate.getDate() + DAY_LIMIT);
    if (expireDate.getTime() <= Date.now()) {
      console.log(`\nUser ${user.email} over expire limit for personal data, deleting metadata...`);
      await removePersonalMetadata(user);
      console.log(`Removed user ${user.email} personal metadata successfully`);
    }
  }
  console.log('\nFinished removing expired personal metadata');
  process.exit();
};
removeOldPeronsalData();

عرض الملف

@@ -0,0 +1,204 @@
const getEnvVariables = require("../dist/enviroment/getEnvVariables");
getEnvVariables()
const mongoose = require("./mongoServerUtil");
const conn = mongoose.connection;
const env = require("../dist/enviroment/env")
const DbUtilsFile = require("../dist/db/utils/fileUtils/index")
const dbUtilsFile = new DbUtilsFile();
const Thumbnail = require("../dist/models/thumbnail");
const removeChunksFS = require("../dist/services/ChunkService/utils/removeChunksFS");
const File = require("../dist/models/file");
const mongod = require("mongodb");
const ObjectID = mongod.ObjectID;
const User = require('../dist/models/user');
const s3 = require("../dist/db/s3");
const Stripe = require("stripe")
const removeChunksS3 = require("../dist/services/ChunkService/utils/removeChunksS3");
const getKey = require("../key/getKey");
const stripKey = env.stripeKey;
const DAY_LIMIT = 30;
const waitForDatabase = () => {
  // Resolves once the shared mongoose connection is open (readyState 1).
  // Already connected: resolve immediately; otherwise wait for "open".
  return new Promise((resolve) => {
    if (conn.readyState === 1) {
      resolve();
      return;
    }
    conn.once("open", resolve);
  });
};
const mongoRemoveChunks = async (fileList) => {
  // GridFS backend: delete each file via the bucket (file doc + chunks),
  // plus its thumbnail document when one exists.
  const bucket = new mongoose.mongo.GridFSBucket(conn.db, {
    chunkSizeBytes: 1024 * 255,
  });
  for (const file of fileList) {
    if (file.metadata.thumbnailID) {
      await Thumbnail.deleteOne({ _id: file.metadata.thumbnailID });
    }
    await bucket.delete(new ObjectID(file._id));
  }
};
const fsRemoveChunks = async (fileList) => {
  // Filesystem backend: remove each file's on-disk data, its thumbnail
  // (disk data + document) when present, and the file document itself.
  for (const file of fileList) {
    const thumbnailID = file.metadata.thumbnailID;
    if (thumbnailID) {
      const thumbnail = await Thumbnail.findById(thumbnailID);
      await removeChunksFS(thumbnail.path);
      await Thumbnail.deleteOne({ _id: thumbnailID });
    }
    await removeChunksFS(file.metadata.filePath);
    await File.deleteOne({ _id: file._id });
  }
};
const s3RemoveChunks = async (fileList) => {
  // S3 backend: remove each file's object (and its thumbnail object and
  // document when present) from the bucket, then delete the file document.
  const s3Storage = s3;
  const bucket = env.s3Bucket;
  for (const file of fileList) {
    const thumbnailID = file.metadata.thumbnailID;
    if (thumbnailID) {
      const thumbnail = await Thumbnail.findById(thumbnailID);
      await removeChunksS3(s3Storage, { Bucket: bucket, Key: thumbnail.s3ID });
      await Thumbnail.deleteOne({ _id: thumbnailID });
    }
    await removeChunksS3(s3Storage, { Bucket: bucket, Key: file.metadata.s3ID });
    await File.deleteOne({ _id: file._id });
  }
};
const removeChunkData = async (user) => {
  // Deletes all of a user's non-personal file data, dispatching to the
  // storage backend configured in env.dbType ("mongo", "fs", else S3).
  const fileList = await conn.db.collection("fs.files").find({
    "metadata.owner": user._id,
    "metadata.personalFile": null,
  }).toArray();
  switch (env.dbType) {
    case "mongo":
      await mongoRemoveChunks(fileList);
      break;
    case "fs":
      await fsRemoveChunks(fileList);
      break;
    default:
      await s3RemoveChunks(fileList);
  }
};
const removeFolders = async (user) => {
  // Deletes all of a user's non-personal folder documents.
  await conn.db.collection("folders").deleteMany({
    owner: user._id.toString(),
    personalFolder: null,
  });
};
const removeOldSubscriptionData = async () => {
  // Removes file/folder data for users whose Stripe subscription has been
  // canceled or lapsed for more than DAY_LIMIT days.
  console.log("Getting Encryption Password");
  await getKey();
  console.log("Got encryption key\n");
  console.log("Waiting for mongoDB Database...");
  await waitForDatabase();
  console.log("MongoDB Connection established\n");
  console.log("Starting expire data check...");
  const allUsers = await User.find({});
  console.log("All users length", allUsers.length);
  for (const currentUser of allUsers) {
    if (currentUser.stripeCanceledDate) {
      // Explicit cancellation date stored on the user document.
      const expireDate = new Date(currentUser.stripeCanceledDate);
      expireDate.setDate(expireDate.getDate() + DAY_LIMIT);
      if (expireDate.getTime() <= Date.now()) {
        console.log(`\nUser ${currentUser.email} over expire limit, deleting data...`);
        await removeChunkData(currentUser);
        await removeFolders(currentUser);
        console.log(`Removed user ${currentUser.email} data successfully`);
      }
    } else if (currentUser.stripeEnabled) {
      // No stored cancellation date: check the live subscription status.
      const stripe = new Stripe(stripKey, {
        apiVersion: '2020-03-02',
      });
      const { subID } = await currentUser.decryptStripeData();
      const subscriptionDetails = await stripe.subscriptions.retrieve(subID);
      // BUGFIX: the Stripe status is "trialing", not "trailing" — the old
      // comparison never matched, so users on a trial were treated as
      // lapsed and could have their data deleted.
      if (subscriptionDetails.status !== "active" && subscriptionDetails.status !== "trialing") {
        const endedAt = subscriptionDetails.ended_at * 1000;
        const expireDate = new Date(endedAt);
        expireDate.setDate(expireDate.getDate() + DAY_LIMIT);
        if (expireDate.getTime() <= Date.now()) {
          console.log(`\nUser ${currentUser.email} over expire limit, deleting data...`);
          await removeChunkData(currentUser);
          await removeFolders(currentUser);
          console.log(`Removed user ${currentUser.email} data successfully`);
        }
      }
    }
  }
  console.log("\nFinished removing all expired data");
  process.exit();
};
removeOldSubscriptionData();

عرض الملف

@@ -0,0 +1,44 @@
const getEnvVariables = require("../dist/enviroment/getEnvVariables");
getEnvVariables();
const mongoose = require("./mongoServerUtil");
const conn = mongoose.connection;
const User = require("../dist/models/user");
const waitForDatabase = () => {
  // Resolves once the shared mongoose connection is open (readyState 1).
  // Already connected: resolve immediately; otherwise wait for "open".
  return new Promise((resolve) => {
    if (conn.readyState === 1) {
      resolve();
      return;
    }
    conn.once("open", resolve);
  });
};
/**
 * Clears every user's auth tokens (`tokens` and `tempTokens`) with a
 * single bulk update, then exits the process.
 *
 * Fix: the original ran `User.find({})` — loading every user document
 * into memory — just to read `.length` for the log line. A
 * `countDocuments` query returns the same number without materializing
 * the collection.
 */
const removeTokens = async () => {
  console.log("\nWaiting for database...");
  await waitForDatabase();
  console.log("Connected to database\n");

  console.log("Removing tokens from users...");
  // Count first so the log reflects how many users were affected.
  const userCount = await User.countDocuments({});
  await User.updateMany({}, {
    tokens: [],
    tempTokens: [],
  });

  console.log("Removed tokens from", userCount, "users");
  process.exit();
};

removeTokens();

عرض الملف

@@ -0,0 +1,55 @@
const mongoose = require("../backend/db/mongooseServerUtils");
const prompts = require("prompts");
const restoreFromTempDirectory = require("./restoreFromTempDirectory");
const conn = mongoose.connection;
/**
 * Resolves once the mongoose connection is open.
 *
 * Fix: the original only listened for "open", so a failed connection
 * attempt left the promise pending forever (the `reject` parameter was
 * never used). We now also listen for "error" and reject, letting the
 * caller fail fast instead of hanging.
 *
 * @returns {Promise<void>}
 */
const waitForDatabase = () => {
  return new Promise((resolve, reject) => {
    // readyState 1 === connected; anything else means we must wait.
    if (conn.readyState !== 1) {
      conn.once("open", resolve);
      conn.once("error", reject);
    } else {
      resolve();
    }
  });
};
/**
 * Interactive entry point: asks the operator to confirm, then restores
 * the live database from the temp backup collections created by
 * CopyDatabase. Exits the process in every path.
 */
const restoreDatabase = async () => {
  const confirmation = await prompts({
    type: 'text',
    name: "value",
    message: "Warning: This will delete ALL data," +
      " other than the Data Backup created by CopyDatabase. \nMake sure to first run CopyDatabase, and backup" +
      " your data, \nWould you like to continue? (Yes/No)",
  });

  // Anything other than an explicit "yes" (case-insensitive) aborts.
  const answer = confirmation.value ? confirmation.value.toLowerCase() : "";
  if (answer !== "yes") {
    console.log("Exiting...");
    process.exit();
    return;
  }

  await waitForDatabase();
  await restoreFromTempDirectory();
  console.log("Finished Restoring Data, Exiting...");
  process.exit();
};

restoreDatabase();

عرض الملف

@@ -0,0 +1,105 @@
const mongoose = require("../backend/db/mongooseServerUtils");
const cliProgress = require('cli-progress');
const conn = mongoose.connection;
// Collections that make up a full dataset; dropped before a restore so
// the temp backup collections can be copied in cleanly.
const COLLECTIONS_TO_DROP = [
  "fs.files",
  "fs.chunks",
  "thumbnails",
  "folders",
  "videos.files",
  "videos.chunks",
  "users",
];

/**
 * Drops every live collection ahead of a restore.
 *
 * Drop errors are intentionally ignored (matching the original
 * best-effort behavior): `drop()` throws "ns not found" when a
 * collection simply does not exist yet, which is fine here.
 *
 * Fix: seven copy-pasted try/catch blocks collapsed into one
 * data-driven loop over the collection names.
 */
const clearDirectory = async () => {
  console.log("Removing Collections...");
  for (const name of COLLECTIONS_TO_DROP) {
    try {
      await conn.db.collection(name).drop();
    } catch (e) {
      // Collection may not exist — safe to continue.
    }
  }
  console.log("Removed Collections\n");
};
/**
 * Copies every document from collection `oldPath` into `newPath`,
 * showing a CLI progress bar while it works.
 *
 * Fixes: `find()` returns a cursor synchronously, so the `await` on it
 * was a no-op; and the count now uses `countDocuments` instead of the
 * deprecated `cursor.count()` (which also created a second, redundant
 * cursor over the same collection).
 *
 * @param {string} oldPath - source collection name (the temp backup)
 * @param {string} newPath - destination collection name
 */
const moveItem = async (oldPath, newPath) => {
  const listCursor = conn.db.collection(oldPath).find({});
  const listCount = await conn.db.collection(oldPath).countDocuments({});

  const progressBar = new cliProgress.SingleBar(
    {},
    cliProgress.Presets.shades_classic
  );
  progressBar.start(listCount, 0);

  for await (const currentFile of listCursor) {
    await conn.db.collection(newPath).insertOne(currentFile);
    progressBar.increment();
  }

  progressBar.stop();
};
// Ordered restore plan: each backup collection, its destination, the
// exact log lines the original printed, and (for GridFS chunk
// collections) the index that must be recreated after the copy.
const RESTORE_STEPS = [
  { from: "temp-fs.files", to: "fs.files",
    start: "Moving Files...", done: "Moved All Files\n" },
  { from: "temp-fs.chunks", to: "fs.chunks",
    start: "Moving File Chunks...", done: "Moved All Chunks \n",
    index: { start: "Creating File Index...", done: "File Index Created \n" } },
  { from: "temp-thumbnails", to: "thumbnails",
    start: "Moving Thumbnails...", done: "Moved All Thumbnails \n" },
  { from: "temp-folders", to: "folders",
    start: "Moving Folders...", done: "All Folders Moved \n" },
  { from: "temp-videos.files", to: "videos.files",
    start: "Moving Transcoded Video Files...", done: "All Transcoded Video Files Moved \n" },
  { from: "temp-videos.chunks", to: "videos.chunks",
    start: "Moving Transcoded Video Chunks...", done: "All Transcoded Video Chunks Moved \n",
    index: { start: "Creating Transcoded Video Chunks Index...", done: "Created Transcoded Video Chunks Index \n" } },
  { from: "temp-users", to: "users",
    start: "Moving Users...", done: "All Users Moved\n" },
];

/**
 * Restores the live database from the "temp-" backup collections:
 * wipes the current collections, copies each backup collection back in
 * order, and recreates the GridFS `{ files_id, n }` unique indexes.
 *
 * Fix: seven near-identical copy/log stanzas replaced with one loop
 * over a step table; console output and operation order are unchanged.
 */
const restoreFromTempDirectory = async () => {
  console.log("\n");
  await clearDirectory();

  for (const step of RESTORE_STEPS) {
    console.log(step.start);
    await moveItem(step.from, step.to);
    console.log(step.done);

    if (step.index) {
      console.log(step.index.start);
      await conn.db
        .collection(step.to)
        .createIndex({ files_id: 1, n: 1 }, { unique: true });
      console.log(step.index.done);
    }
  }
};

module.exports = restoreFromTempDirectory;

365
serverUtils/setupServer.js Normal file
عرض الملف

@@ -0,0 +1,365 @@
const prompts = require("prompts");
const fs = require("fs");
const crypto = require("crypto");
/**
 * Promisified fs.mkdir.
 *
 * Resolves when the directory exists — including when it already did
 * (EEXIST). Fix: the original ignored the mkdir error entirely, so
 * real failures (e.g. EACCES) were silently swallowed and setup
 * continued against a missing directory; those now reject.
 *
 * @param {string} path - directory to create
 * @returns {Promise<void>}
 */
const awaitcreateDir = (path) => {
  return new Promise((resolve, reject) => {
    fs.mkdir(path, (err) => {
      if (err && err.code !== "EEXIST") {
        reject(err);
        return;
      }
      resolve();
    });
  });
};
/**
 * Promisified fs.writeFile.
 *
 * Fix: on error the original called reject() and then fell through to
 * resolve() — a missing `return` — and rejected with no value. We now
 * return after rejecting and pass the underlying error to the caller.
 *
 * @param {string} path - destination file path
 * @param {string|Buffer} data - contents to write
 * @returns {Promise<void>}
 */
const awaitWriteFile = (path, data) => {
  return new Promise((resolve, reject) => {
    fs.writeFile(path, data, (err) => {
      if (err) {
        console.log("file write error", err);
        reject(err);
        return;
      }
      resolve();
    });
  });
};
/**
 * Interactive first-time setup: walks the operator through the myDrive
 * configuration prompts and writes the resulting env file(s).
 *
 * Docker path  -> writes everything to ./docker-variables.env
 * Bare path    -> writes client vars to ./.env.production and server
 *                 vars to ./config/prod.env
 *
 * Fix: the Docker-branch client-URL prompt message was garbled (its
 * text was duplicated twice mid-sentence); it now matches the correct
 * wording used by the non-Docker branch. Prompt order and all generated
 * env keys are unchanged.
 */
const initServer = async () => {
  console.log("Setting Up Server...\n");
  await awaitcreateDir("./config");

  const getDocker = await prompts({
    type: 'toggle',
    name: 'value',
    message: 'Use Docker With myDrive?',
    initial: true,
    active: 'yes',
    inactive: 'no'
  });
  const docker = getDocker.value;

  if (docker) {
    // --- Docker: everything goes into a single env file. ---
    let stringBuilder = '';

    const getUsingMongo = await prompts({
      type: 'toggle',
      name: 'value',
      message: "Include MongoDB In The Docker Image? (Select No If You're Using MongoDB Atlas)",
      initial: true,
      active: 'yes',
      inactive: 'no'
    });
    const mongo = getUsingMongo.value;

    // Default points at the "mongo" service in the compose network.
    let mongoURL = "mongodb://mongo:27017/personal-drive";
    if (!mongo) {
      const getMongoURL = await prompts({
        type: 'text',
        message: "Enter The MongoDB URL",
        name: "value"
      });
      mongoURL = getMongoURL.value;
    }
    stringBuilder += "MONGODB_URL=" + mongoURL + "\n";

    const getKeyType = await prompts({
      type: 'toggle',
      name: 'value',
      message: "Use WebUI For Encryption Key (Recommended, Selecting No Will Require You To Enter An Encryption Key Now, Which Is Less Secure)",
      initial: true,
      active: 'yes',
      inactive: 'no'
    });
    const keyType = getKeyType.value;
    if (!keyType) {
      const getKey = await prompts({
        type: 'password',
        message: "Enter The Encryption Key",
        name: "value"
      });
      const key = getKey.value;
      stringBuilder += "KEY=" + key + "\n";
    }

    const getClientURL = await prompts({
      type: 'text',
      // Fixed: message text was accidentally duplicated in the original.
      message: "Enter The Client URL/IP Address (Must Be A Valid Link, Include Port With IP Address If Needed)",
      name: "value"
    });
    const clientURL = getClientURL.value;
    stringBuilder += "REMOTE_URL=" + clientURL + "\n";

    const getChunkDB = await prompts({
      type: 'select',
      name: 'value',
      message: 'Pick A Database To Store File Chunks',
      choices: [
        { title: 'Amazon S3', value: 's3'},
        { title: 'FileSystem', value: 'fs'},
        { title: 'MongoDB', value: 'mongo' },
      ],
      initial: 1
    });
    const chunkDB = getChunkDB.value;
    stringBuilder += "DB_TYPE=" + chunkDB + "\n";

    if (chunkDB === "s3") {
      const gets3ID = await prompts({
        type: 'text',
        message: "Enter The S3 ID",
        name: "value"
      });
      const s3ID = gets3ID.value;
      stringBuilder += "S3_ID=" + s3ID + "\n";

      const gets3Key = await prompts({
        type: 'password',
        message: "Enter The S3 Key",
        name: "value"
      });
      const s3Key = gets3Key.value;
      stringBuilder += "S3_KEY=" + s3Key + "\n";

      const gets3Bucket = await prompts({
        type: 'text',
        message: "Enter The S3 Bucket",
        name: "value"
      });
      const s3Bucket = gets3Bucket.value;
      stringBuilder += "S3_BUCKET=" + s3Bucket + "\n";
    } else if (chunkDB === "fs") {
      const getFSPath = await prompts({
        type: 'text',
        message: "Enter The FileSystem Path",
        name: "value"
      });
      const fsPath = getFSPath.value;
      stringBuilder += "FS_DIRECTORY=" + fsPath + "\n";
    }

    const getJWTSecret = await prompts({
      type: 'password',
      message: "Enter JWT Secret",
      name: "value",
    });
    const JWTsecret = getJWTSecret.value;
    stringBuilder += "PASSWORD=" + JWTsecret + "\n";

    const getUseSSL = await prompts({
      type: 'toggle',
      name: 'value',
      message: "Use SSL? (Will Require SSL Certificate certificate.crt, certificate.ca-bundle, And certificate.key At Root Of The Project)",
      initial: true,
      active: 'yes',
      inactive: 'no'
    });
    const useSSL = getUseSSL.value;
    if (useSSL) {
      stringBuilder += "SSL=true\n";
    }

    // Fixed values for the container build.
    stringBuilder += "DISABLE_STORAGE=true\n";
    stringBuilder += "DOCKER=true\n";
    stringBuilder += "NODE_ENV=production\n";
    stringBuilder += "PORT=3000\n";
    stringBuilder += "HTTP_PORT=3000\n";
    stringBuilder += "HTTPS_PORT=8080\n";

    await awaitWriteFile("./docker-variables.env", stringBuilder);
    console.log("\nCreated Docker Env File");
  } else {
    // --- Bare install: separate client and server env files. ---
    let stringBuilderClient = '';
    let stringBuilderServer = '';

    const getMongoURL = await prompts({
      type: 'text',
      message: "Enter The MongoDB URL",
      name: "value"
    });
    const mongoURL = getMongoURL.value;
    stringBuilderServer += "MONGODB_URL=" + mongoURL + "\n";

    const getKeyType = await prompts({
      type: 'toggle',
      name: 'value',
      message: "Use WebUI For Encryption Key (Recommended, Selecting No Will Require You To Enter An Encryption Key Now, Which Is Less Secure)",
      initial: true,
      active: 'yes',
      inactive: 'no'
    });
    const keyType = getKeyType.value;
    if (!keyType) {
      const getKey = await prompts({
        type: 'password',
        message: "Enter The Encryption Key",
        name: "value"
      });
      const key = getKey.value;
      stringBuilderServer += "KEY=" + key + "\n";
    }

    const getClientURL = await prompts({
      type: 'text',
      message: "Enter The Client URL/IP Address (Must Be A Valid Link, Include Port With IP Address If Needed)",
      name: "value"
    });
    const clientURL = getClientURL.value;
    stringBuilderClient += "REMOTE_URL=" + clientURL + "\n";

    const getChunkDB = await prompts({
      type: 'select',
      name: 'value',
      message: 'Pick A Database To Store File Chunks',
      choices: [
        { title: 'Amazon S3', value: 's3'},
        { title: 'FileSystem', value: 'fs'},
        { title: 'MongoDB', value: 'mongo' },
      ],
      initial: 1
    });
    const chunkDB = getChunkDB.value;
    stringBuilderServer += "DB_TYPE=" + chunkDB + "\n";

    if (chunkDB === "s3") {
      const gets3ID = await prompts({
        type: 'text',
        message: "Enter The S3 ID",
        name: "value"
      });
      const s3ID = gets3ID.value;
      stringBuilderServer += "S3_ID=" + s3ID + "\n";

      const gets3Key = await prompts({
        type: 'password',
        message: "Enter The S3 Key",
        name: "value"
      });
      const s3Key = gets3Key.value;
      stringBuilderServer += "S3_KEY=" + s3Key + "\n";

      const gets3Bucket = await prompts({
        type: 'text',
        message: "Enter The S3 Bucket",
        name: "value"
      });
      const s3Bucket = gets3Bucket.value;
      stringBuilderServer += "S3_BUCKET=" + s3Bucket + "\n";
      stringBuilderClient += "DISABLE_STORAGE=true\n";
    } else if (chunkDB === "fs") {
      const getFSPath = await prompts({
        type: 'text',
        message: "Enter The FileSystem Path",
        name: "value"
      });
      const fsPath = getFSPath.value;
      stringBuilderServer += "FS_DIRECTORY=" + fsPath + "\n";
      stringBuilderServer += "ROOT=" + fsPath + "\n";
    } else {
      // MongoDB chunk storage: the client does not serve from disk.
      stringBuilderClient += "DISABLE_STORAGE=true\n";
    }

    const getJWTSecret = await prompts({
      type: 'password',
      message: "Enter JWT Secret",
      name: "value",
    });
    const JWTsecret = getJWTSecret.value;
    stringBuilderServer += "PASSWORD=" + JWTsecret + "\n";

    const getUseSSL = await prompts({
      type: 'toggle',
      name: 'value',
      message: "Use SSL? (Will Require SSL Certificate certificate.crt, certificate.ca-bundle, And certificate.key At Root Of The Project)",
      initial: true,
      active: 'yes',
      inactive: 'no'
    });
    const useSSL = getUseSSL.value;
    if (useSSL) {
      stringBuilderServer += "SSL=true\n";
    }

    stringBuilderServer += "NODE_ENV=production\n";
    stringBuilderClient += "PORT=3000\n";
    stringBuilderServer += "HTTP_PORT=3000\n";
    stringBuilderServer += "HTTPS_PORT=8080\n";

    await awaitWriteFile("./.env.production", stringBuilderClient);
    await awaitWriteFile("./config/prod.env", stringBuilderServer);
    console.log("\nServer And Client Env Files Created");
  }
};

initServer();