Upload main.js via GUI
This commit is contained in:
475
main.js
475
main.js
@@ -6,7 +6,7 @@ const os = require('os');
|
||||
const crypto = require('crypto');
|
||||
const { execSync } = require('child_process');
|
||||
const https = require('https');
|
||||
const http = require('http');
|
||||
const archiver = require('archiver');
|
||||
const Updater = require('./updater.js'); // Auto-Updater
|
||||
let updater = null;
|
||||
|
||||
@@ -36,6 +36,10 @@ const {
|
||||
|
||||
const { initRepo, commitAndPush, getBranches, getCommitLogs } = require('./src/git/gitHandler.js');
|
||||
|
||||
// Backup system
|
||||
const BackupManager = require('./src/backup/BackupManager.js');
|
||||
const LocalProvider = require('./src/backup/LocalProvider.js');
|
||||
|
||||
// NOTE: credentials/data location is computed via getDataDir() to avoid calling app.getPath before ready
|
||||
function getCredentialsFilePath() {
|
||||
return ppath.join(app.getPath('userData'), 'credentials.json');
|
||||
@@ -336,6 +340,93 @@ function readCredentials() {
|
||||
}
|
||||
}
|
||||
|
||||
/* ==================== BACKUP CONFIGURATION ==================== */
|
||||
let backupProviders = {}; // { [repoName]: provider instance }
|
||||
|
||||
function getBackupConfigPath() {
|
||||
return ppath.join(app.getPath('userData'), 'backup-config.json');
|
||||
}
|
||||
|
||||
/**
 * Loads and decrypts the backup configuration from disk.
 *
 * The config maps repo names (and `__global__:<provider>` keys) to
 * `{ provider, credentials }` entries — see saveBackupConfig for the writer.
 *
 * @returns {Object} Parsed configuration object, or `{}` when the file does
 *   not exist or cannot be read/decrypted (errors are logged, never thrown).
 */
function readBackupConfig() {
  try {
    const file = getBackupConfigPath();
    // Missing file is the normal first-run case, not an error.
    if (!fs.existsSync(file)) return {};
    const encrypted = fs.readFileSync(file);
    // NOTE(review): ALGORITHM/SECRET_KEY/IV are file-level constants; a static
    // IV weakens the encryption (identical plaintexts encrypt identically) —
    // presumably acceptable for local obfuscation, confirm.
    const decipher = crypto.createDecipheriv(ALGORITHM, SECRET_KEY, IV);
    const decrypted = Buffer.concat([decipher.update(encrypted), decipher.final()]);
    return JSON.parse(decrypted.toString('utf8'));
  } catch (e) {
    // Degrade to "no configuration" on any read/decrypt/parse failure so
    // callers never have to handle exceptions from this helper.
    console.error('readBackupConfig error', e);
    return {};
  }
}
|
||||
|
||||
/**
 * Encrypts and persists the backup configuration to disk.
 *
 * Counterpart to readBackupConfig: serializes `config` as JSON, encrypts it
 * with the file-level ALGORITHM/SECRET_KEY/IV, and writes it to the path
 * returned by getBackupConfigPath().
 *
 * @param {Object} config - Full configuration object to persist (overwrites
 *   the previous file contents entirely).
 * @returns {void} Failures are logged and swallowed; callers get no signal.
 */
function saveBackupConfig(config) {
  try {
    const file = getBackupConfigPath();
    const dir = app.getPath('userData');
    // userData may not exist yet on a fresh install.
    if (!fs.existsSync(dir)) fs.mkdirSync(dir, { recursive: true });
    const cipher = crypto.createCipheriv(ALGORITHM, SECRET_KEY, IV);
    const encrypted = Buffer.concat([cipher.update(JSON.stringify(config), 'utf8'), cipher.final()]);
    fs.writeFileSync(file, encrypted);
  } catch (e) {
    // Best-effort persistence: a failed save only loses caching, so errors
    // are logged rather than propagated.
    console.error('saveBackupConfig error', e);
  }
}
|
||||
|
||||
/**
 * Builds the config key under which provider-wide (non-repo-specific)
 * backup credentials are stored, e.g. `__global__:local`.
 *
 * @param {string} provider - Provider identifier (currently only 'local').
 * @returns {string} The namespaced global config key.
 */
function getGlobalBackupKey(provider) {
  const GLOBAL_PREFIX = '__global__:';
  return GLOBAL_PREFIX + provider;
}
|
||||
|
||||
/**
 * Instantiates and authenticates a backup provider.
 *
 * Only the 'local' provider is supported; any other identifier is rejected.
 *
 * @param {string} provider - Provider identifier; must be 'local'.
 * @param {Object} credentials - Passed to the provider's authenticate().
 * @returns {Promise<LocalProvider>} Authenticated provider instance.
 * @throws {Error} When `provider` is anything other than 'local'.
 */
async function createProviderInstance(provider, credentials) {
  if (provider === 'local') {
    const instance = new LocalProvider();
    await instance.authenticate(credentials);
    return instance;
  }
  throw new Error('Nur lokaler Backup-Provider wird unterstützt.');
}
|
||||
|
||||
/**
 * Resolves the backup configuration entry to use for a repository.
 *
 * Resolution order:
 *   1. A repo-specific entry (`config[repoName]`) with provider 'local'.
 *   2. If hinted, the provider's global entry (`__global__:<hint>`).
 *   3. Any `__global__:` entry that is 'local' and carries credentials.
 *
 * @param {Object} config - Parsed backup configuration (see readBackupConfig).
 * @param {string} repoName - Repository name to look up.
 * @param {string} [providerHint=''] - Optional provider name to prefer.
 * @returns {{source: ('repo'|'global'|'none'), entry: (Object|null)}}
 */
function resolveBackupConfig(config, repoName, providerHint = '') {
  const repoEntry = config[repoName];
  if (repoEntry && repoEntry.provider === 'local') {
    return { source: 'repo', entry: repoEntry };
  }

  if (providerHint === 'local') {
    const hinted = config[getGlobalBackupKey(providerHint)];
    if (hinted && hinted.provider === 'local') {
      return { source: 'global', entry: hinted };
    }
  }

  // Fall back to the first global entry that actually has credentials.
  const globalMatch = Object.entries(config).find(
    ([key, value]) =>
      key.startsWith('__global__:') && value && value.provider === 'local' && value.credentials
  );
  if (globalMatch) {
    return { source: 'global', entry: globalMatch[1] };
  }

  return { source: 'none', entry: null };
}
|
||||
|
||||
/**
 * Derives a filesystem-safe backup name for a repository.
 *
 * Prefers the last path segment of `inputRepoName` (to strip an
 * `owner/repo` prefix); falls back to the last segment of `folderPath`,
 * then to the literal 'project'. Characters invalid in filenames and
 * whitespace runs are collapsed to single dashes.
 *
 * @param {string} inputRepoName - Repo name, possibly 'owner/repo'.
 * @param {string} folderPath - Local project folder used as fallback.
 * @returns {string} Sanitized name safe for use in backup filenames.
 */
function normalizeBackupRepoName(inputRepoName, folderPath) {
  const trimmed = String(inputRepoName || '').trim();
  let baseName = trimmed;
  if (trimmed.includes('/')) {
    baseName = trimmed.split('/').pop();
  }
  if (!baseName) {
    // Fall back to the folder's basename (handles both / and \ separators).
    const segments = String(folderPath || '').split(/[\\/]/);
    baseName = segments.pop() || 'project';
  }
  return baseName
    .trim()
    .replace(/[<>:"/\\|?*]+/g, '-')
    .replace(/\s+/g, '-');
}
|
||||
|
||||
/**
 * Ensures a local backup provider is configured and cached for `repoName`.
 *
 * Creates and authenticates a LocalProvider rooted at `basePath`, stores it
 * in the in-memory `backupProviders` cache, and persists the credentials
 * both repo-specifically and under the global 'local' key so future repos
 * can reuse the same target folder.
 *
 * @param {string} repoName - Normalized repository/backup name.
 * @param {string} basePath - Local directory the provider writes backups to.
 * @returns {Promise<void>}
 * @throws Propagates failures from createProviderInstance / authenticate.
 */
async function ensureLocalBackupConfigured(repoName, basePath) {
  const credentials = { basePath };
  const providerInstance = await createProviderInstance('local', credentials);
  // Cache in memory so subsequent IPC calls skip the disk lookup.
  backupProviders[repoName] = providerInstance;

  const config = readBackupConfig();
  config[repoName] = { provider: 'local', credentials };
  // Also record globally so other repos inherit this target by default.
  config[getGlobalBackupKey('local')] = { provider: 'local', credentials };
  saveBackupConfig(config);
}
|
||||
|
||||
function mapIpcError(errorLike) {
|
||||
const raw = String(errorLike && errorLike.message ? errorLike.message : errorLike || '').toLowerCase();
|
||||
if (!raw) return 'Unbekannter Fehler.';
|
||||
@@ -392,6 +483,21 @@ async function runLimited(tasks, concurrency = DEFAULT_CONCURRENCY, onProgress =
|
||||
return results;
|
||||
}
|
||||
|
||||
/**
 * Zips the contents of a directory into a single archive file.
 *
 * The directory's contents are placed at the archive root (no wrapping
 * folder), compressed at zlib level 5.
 *
 * @param {string} sourceDir - Directory whose contents are archived.
 * @param {string} zipFilePath - Destination path of the .zip file.
 * @returns {Promise<void>} Resolves once the output stream has closed.
 */
async function createZipFromDirectory(sourceDir, zipFilePath) {
  return new Promise((resolve, reject) => {
    const output = fs.createWriteStream(zipFilePath);
    const zip = archiver('zip', { zlib: { level: 5 } });

    // Reject on either stream or archiver failure; resolve only when the
    // file handle has fully closed (all bytes flushed).
    zip.on('error', reject);
    output.on('error', reject);
    output.on('close', resolve);

    zip.pipe(output);
    zip.directory(sourceDir, false);
    zip.finalize();
  });
}
|
||||
|
||||
/* -----------------------------
|
||||
Basic IPC handlers
|
||||
----------------------------- */
|
||||
@@ -474,6 +580,39 @@ ipcMain.handle('create-repo', async (event, data) => {
|
||||
ipcMain.handle('push-project', async (event, data) => {
|
||||
try {
|
||||
if (!data.folder || !fs.existsSync(data.folder)) return { ok: false, error: 'folder-not-found' };
|
||||
|
||||
const emitPrePushStatus = (payload) => {
|
||||
try { event.sender.send('pre-push-backup-status', payload); } catch (_) {}
|
||||
};
|
||||
|
||||
const credentials = readCredentials() || {};
|
||||
const autoBackupEnabled = Boolean(data.autoBackup || credentials.autoBackupEnabled);
|
||||
|
||||
if (autoBackupEnabled) {
|
||||
const backupTarget = String(data.backupTarget || credentials.backupPrefLocalFolder || '').trim();
|
||||
if (!backupTarget) {
|
||||
return { ok: false, error: 'Auto-Backup ist aktiv, aber kein lokaler Backup-Zielordner ist gesetzt.' };
|
||||
}
|
||||
|
||||
const backupRepoName = normalizeBackupRepoName(data.repoName, data.folder);
|
||||
try {
|
||||
emitPrePushStatus({ stage: 'backup-start', repoName: backupRepoName, target: backupTarget });
|
||||
await ensureLocalBackupConfigured(backupRepoName, backupTarget);
|
||||
const backupResult = await ipcMain._handle_create_cloud_backup(event, {
|
||||
repoName: backupRepoName,
|
||||
projectPath: data.folder
|
||||
});
|
||||
|
||||
if (!backupResult?.ok) {
|
||||
emitPrePushStatus({ stage: 'backup-failed', repoName: backupRepoName, error: backupResult?.error || 'Unbekannter Fehler' });
|
||||
return { ok: false, error: `Auto-Backup vor Upload fehlgeschlagen: ${backupResult?.error || 'Unbekannter Fehler'}` };
|
||||
}
|
||||
emitPrePushStatus({ stage: 'backup-done', repoName: backupRepoName, filename: backupResult.filename || '' });
|
||||
} catch (backupErr) {
|
||||
emitPrePushStatus({ stage: 'backup-failed', repoName: backupRepoName, error: mapIpcError(backupErr) });
|
||||
return { ok: false, error: `Auto-Backup vor Upload fehlgeschlagen: ${mapIpcError(backupErr)}` };
|
||||
}
|
||||
}
|
||||
|
||||
// Aktuellen Branch ermitteln (NICHT umbenennen!)
|
||||
let currentBranch = data.branch || null;
|
||||
@@ -531,7 +670,11 @@ ipcMain.handle('push-project', async (event, data) => {
|
||||
const progressCb = percent => { try { event.sender.send('push-progress', percent); } catch (_) {} };
|
||||
const commitMsg = data.commitMessage || 'Update from Git Manager GUI';
|
||||
const pushBranch = currentBranch || 'main';
|
||||
if (autoBackupEnabled) {
|
||||
emitPrePushStatus({ stage: 'upload-start' });
|
||||
}
|
||||
await commitAndPush(data.folder, pushBranch, commitMsg, progressCb);
|
||||
|
||||
return { ok: true };
|
||||
} catch (e) {
|
||||
console.error('push-project error', e);
|
||||
@@ -539,6 +682,336 @@ ipcMain.handle('push-project', async (event, data) => {
|
||||
}
|
||||
});
|
||||
|
||||
/* ==================== BACKUP IPC HANDLERS ==================== */
|
||||
|
||||
/**
 * IPC: configures the backup provider for a repository.
 *
 * Always uses the 'local' provider regardless of the `provider` field in
 * the payload. Stores the authenticated instance in memory and persists
 * the credentials (optionally also under the global key).
 */
ipcMain.handle('setup-backup-provider', async (event, { repoName, provider, credentials, applyGlobally = true }) => {
  try {
    if (!repoName) return { ok: false, error: 'repoName fehlt' };

    const localProvider = 'local';
    const instance = await createProviderInstance(localProvider, credentials);
    backupProviders[repoName] = instance;

    // Persist repo-specific entry; optionally promote it to the global default.
    const config = readBackupConfig();
    config[repoName] = { provider: localProvider, credentials };
    if (applyGlobally) {
      config[getGlobalBackupKey(localProvider)] = { provider: localProvider, credentials };
    }
    saveBackupConfig(config);

    return { ok: true, message: 'Backup provider configured', provider: localProvider, applyGlobally };
  } catch (e) {
    console.error('setup-backup-provider error', e);
    return { ok: false, error: mapIpcError(e) };
  }
});
|
||||
|
||||
/**
 * IPC: tests connectivity of the backup provider for a repository.
 *
 * Lazily hydrates the provider from the persisted config when it is not
 * already cached in memory, then delegates to the provider's own
 * testConnection() result.
 */
ipcMain.handle('test-backup-provider', async (event, { repoName, provider }) => {
  try {
    let instance = backupProviders[repoName];
    if (!instance) {
      // Not cached yet — rebuild from the on-disk configuration.
      const config = readBackupConfig();
      const { entry } = resolveBackupConfig(config, repoName, provider);
      if (!entry) {
        return { ok: false, error: 'Backup provider not configured' };
      }
      instance = await createProviderInstance(entry.provider, entry.credentials);
      backupProviders[repoName] = instance;
    }

    return await instance.testConnection();
  } catch (e) {
    console.error('test-backup-provider error', e);
    return { ok: false, error: mapIpcError(e) };
  }
});
|
||||
|
||||
// IPC: thin wrapper so the backup logic can also be invoked internally
// (e.g. by the pre-push auto-backup) via ipcMain._handle_create_cloud_backup.
ipcMain.handle('create-cloud-backup', (event, { repoName, projectPath }) =>
  ipcMain._handle_create_cloud_backup(event, { repoName, projectPath })
);
|
||||
|
||||
/**
 * Creates a backup of `projectPath` for `repoName` via the configured
 * provider. Exposed on ipcMain so both the 'create-cloud-backup' IPC
 * handler and internal callers (pre-push auto-backup) share one code path.
 *
 * @param {Object} event - IPC event (may be absent for internal calls;
 *   sender notification is guarded accordingly).
 * @param {{repoName: string, projectPath: string}} payload
 * @returns {Promise<Object>} `{ ok: true, ...backupResult }` on success,
 *   `{ ok: false, error }` on failure.
 */
ipcMain._handle_create_cloud_backup = async (event, { repoName, projectPath }) => {
  try {
    if (!backupProviders[repoName]) {
      // Try to load from disk
      const config = readBackupConfig();
      const resolved = resolveBackupConfig(config, repoName);
      if (!resolved.entry) {
        return { ok: false, error: 'Backup not configured' };
      }

      const { provider, credentials } = resolved.entry;
      const providerInstance = await createProviderInstance(provider, credentials);

      backupProviders[repoName] = providerInstance;
      // Cache repo-specifically so the next call resolves without the global lookup.
      config[repoName] = { provider, credentials };
      saveBackupConfig(config);
    }

    const manager = new BackupManager(backupProviders[repoName]);
    const result = await manager.createBackup(projectPath, repoName);

    // Notify the renderer if we have a live sender; internal callers may not.
    try { event?.sender?.send('backup-created', result); } catch (_) {}
    return { ok: true, ...result };
  } catch (e) {
    console.error('create-cloud-backup error', e);
    return { ok: false, error: mapIpcError(e) };
  }
};
|
||||
|
||||
/**
 * IPC: lists available backups for a repository (or all backups when
 * `repoName` is empty or the sentinel 'all-git-projects').
 *
 * Lazily hydrates the provider from the persisted config when not cached.
 * On any failure this deliberately answers `{ ok: true, backups: [] }` so
 * the UI renders an empty list instead of an error state.
 * NOTE(review): with a falsy repoName the hydrated provider is cached under
 * the '' key — presumably harmless, but confirm against the renderer's usage.
 */
ipcMain.handle('list-cloud-backups', async (event, { repoName, provider }) => {
  try {
    if (!backupProviders[repoName]) {
      // Try to load from disk
      const config = readBackupConfig();
      const resolved = resolveBackupConfig(config, repoName, provider);
      if (!resolved.entry) {
        return { ok: true, backups: [] };
      }

      const entry = resolved.entry;
      const providerInstance = await createProviderInstance(entry.provider, entry.credentials);

      backupProviders[repoName] = providerInstance;
      config[repoName] = { provider: entry.provider, credentials: entry.credentials };
      saveBackupConfig(config);
    }

    // Normalize provider results: tolerate either filename/name and
    // modifiedTime/date field pairs, emitting both spellings for the UI.
    const mapBackupMeta = (items) => {
      return (Array.isArray(items) ? items : []).map((b) => ({
        filename: b.filename || b.name || '',
        name: b.name || b.filename || '',
        size: Number(b.size || 0),
        modifiedTime: b.modifiedTime || b.date || null,
        date: b.date || b.modifiedTime || null
      }));
    };

    const providerInstance = backupProviders[repoName];
    const manager = new BackupManager(providerInstance);
    const showAllBackups = !repoName || repoName === 'all-git-projects';

    if (showAllBackups) {
      // Unfiltered listing straight from the provider.
      const backups = await providerInstance.listBackups();
      return { ok: true, backups: mapBackupMeta(backups) };
    }

    // Repo-scoped listing via the manager.
    const backups = await manager.listBackups(repoName);
    return { ok: true, backups: mapBackupMeta(backups) };
  } catch (e) {
    console.error('list-cloud-backups error', e);
    return { ok: true, backups: [] };
  }
});
|
||||
|
||||
/**
 * IPC: restores a named backup of `repoName` into `targetPath`.
 *
 * Hydrates the provider from the persisted config when it is not cached,
 * then delegates to BackupManager.restoreBackup and returns its result.
 */
ipcMain.handle('restore-cloud-backup', async (event, { repoName, filename, targetPath }) => {
  try {
    let instance = backupProviders[repoName];
    if (!instance) {
      const config = readBackupConfig();
      const { entry } = resolveBackupConfig(config, repoName);
      if (!entry) {
        return { ok: false, error: 'Backup provider not configured' };
      }
      instance = await createProviderInstance(entry.provider, entry.credentials);
      backupProviders[repoName] = instance;
    }

    const manager = new BackupManager(instance);
    return await manager.restoreBackup(repoName, filename, targetPath);
  } catch (e) {
    console.error('restore-cloud-backup error', e);
    return { ok: false, error: mapIpcError(e) };
  }
});
|
||||
|
||||
/**
 * IPC: deletes a named backup file for `repoName`.
 *
 * Hydrates the provider from the persisted config when it is not cached,
 * then delegates to BackupManager.deleteBackup and returns its result.
 */
ipcMain.handle('delete-cloud-backup', async (event, { repoName, filename }) => {
  try {
    let instance = backupProviders[repoName];
    if (!instance) {
      const config = readBackupConfig();
      const { entry } = resolveBackupConfig(config, repoName);
      if (!entry) {
        return { ok: false, error: 'Backup provider not configured' };
      }
      instance = await createProviderInstance(entry.provider, entry.credentials);
      backupProviders[repoName] = instance;
    }

    const manager = new BackupManager(instance);
    return await manager.deleteBackup(filename);
  } catch (e) {
    console.error('delete-cloud-backup error', e);
    return { ok: false, error: mapIpcError(e) };
  }
});
|
||||
|
||||
/**
 * IPC: reports whether a backup provider is available for `repoName`.
 *
 * `source` tells the renderer where the configuration came from:
 * 'memory' (cached instance), 'repo'/'global' (persisted config via
 * resolveBackupConfig), or 'none'.
 */
ipcMain.handle('get-backup-auth-status', async (event, { repoName, provider }) => {
  try {
    // Without a repo name there is nothing to resolve.
    if (!repoName) return { ok: true, connected: false, source: 'none' };

    // A cached instance means we are already authenticated this session.
    if (backupProviders[repoName]) {
      return { ok: true, connected: true, source: 'memory' };
    }

    const resolved = resolveBackupConfig(readBackupConfig(), repoName, provider);
    if (!resolved.entry) {
      return { ok: true, connected: false, source: 'none' };
    }

    return {
      ok: true,
      connected: true,
      source: resolved.source,
      provider: resolved.entry.provider
    };
  } catch (e) {
    return { ok: false, connected: false, source: 'none', error: mapIpcError(e) };
  }
});
|
||||
|
||||
/**
 * IPC: exports one Gitea repository (mode 'single') or all accessible
 * repositories (mode 'all') as zip archives into a local destination folder.
 *
 * For each repo: recursively lists files via the Gitea contents API,
 * downloads each file into a temp directory, zips it, and reports progress
 * to the renderer over 'folder-download-progress'.
 *
 * @param {Object} data - `{ destination, mode, owner, repo }`.
 * @returns {Promise<Object>} `{ ok, destination, repositories, repositoryCount,
 *   fileCount }` on success, `{ ok: false, error }` on failure.
 */
ipcMain.handle('export-gitea-projects-to-local', async (event, data) => {
  try {
    const destination = String(data && data.destination ? data.destination : '').trim();
    const mode = String(data && data.mode ? data.mode : 'single');
    const owner = String(data && data.owner ? data.owner : '').trim();
    const repo = String(data && data.repo ? data.repo : '').trim();

    if (!destination) return { ok: false, error: 'Zielordner fehlt' };

    const credentials = readCredentials();
    const token = credentials && credentials.giteaToken;
    const url = credentials && credentials.giteaURL;
    if (!token || !url) return { ok: false, error: 'missing-token-or-url' };

    fs.mkdirSync(destination, { recursive: true });

    // Build the list of { owner, repo } pairs to export.
    let repos = [];
    if (mode === 'all') {
      const allRepos = await listGiteaRepos({ token, url });
      repos = (allRepos || []).map(r => ({
        // Gitea API versions differ on login vs username.
        owner: (r.owner && (r.owner.login || r.owner.username)) || '',
        repo: r.name || ''
      })).filter(r => r.owner && r.repo);
    } else {
      if (!owner || !repo) return { ok: false, error: 'owner/repo fehlt' };
      repos = [{ owner, repo }];
    }

    let totalRepos = repos.length;
    let totalFiles = 0;
    let exportedFiles = 0;
    const exportedRepos = [];
    // Local timestamp YYYYMMDD-HHMMSS used in backup filenames.
    const formatStamp = () => {
      const now = new Date();
      const year = now.getFullYear();
      const month = String(now.getMonth() + 1).padStart(2, '0');
      const day = String(now.getDate()).padStart(2, '0');
      const hours = String(now.getHours()).padStart(2, '0');
      const minutes = String(now.getMinutes()).padStart(2, '0');
      const seconds = String(now.getSeconds()).padStart(2, '0');
      return `${year}${month}${day}-${hours}${minutes}${seconds}`;
    };

    for (let i = 0; i < repos.length; i++) {
      const pair = repos[i];
      // Stage each repo in its own temp dir; removed after zipping.
      const tempRoot = fs.mkdtempSync(ppath.join(os.tmpdir(), 'git-manager-backup-'));
      const targetRepoDir = ppath.join(tempRoot, `${pair.owner}__${pair.repo}`);
      fs.mkdirSync(targetRepoDir, { recursive: true });

      // Recursively collect all file paths in the repo at HEAD.
      const files = [];
      async function gather(pathInRepo) {
        const contentRes = await getGiteaRepoContents({
          token,
          url,
          owner: pair.owner,
          repo: pair.repo,
          path: pathInRepo || '',
          ref: 'HEAD'
        });
        // Helper may return either { items: [...] } or a bare array.
        const items = contentRes && contentRes.items ? contentRes.items : contentRes;
        if (!Array.isArray(items)) return;
        for (const item of items) {
          if (item.type === 'dir') {
            await gather(item.path);
          } else if (item.type === 'file') {
            files.push(item.path);
          }
        }
      }

      await gather('');
      totalFiles += files.length;

      for (let f = 0; f < files.length; f++) {
        const remoteFile = files[f];
        const content = await getGiteaFileContent({
          token,
          url,
          owner: pair.owner,
          repo: pair.repo,
          path: remoteFile,
          ref: 'HEAD'
        });
        const localPath = ppath.join(targetRepoDir, remoteFile);
        fs.mkdirSync(ppath.dirname(localPath), { recursive: true });
        // NOTE(review): writing with 'utf8' assumes getGiteaFileContent
        // returns text; binary files would be corrupted — confirm.
        fs.writeFileSync(localPath, content, 'utf8');

        exportedFiles++;
        // Best-effort progress; totals grow per-repo so the denominator is
        // clamped to at least the files seen so far.
        try {
          event.sender.send('folder-download-progress', {
            processed: exportedFiles,
            total: Math.max(totalFiles, exportedFiles),
            percent: Math.round((exportedFiles / Math.max(totalFiles, exportedFiles)) * 100)
          });
        } catch (_) {}
      }

      const stamp = formatStamp();
      const backupFilename = `${pair.repo}-backup-${stamp}-${pair.owner}.zip`;
      const backupPath = ppath.join(destination, backupFilename);

      await createZipFromDirectory(targetRepoDir, backupPath);
      const zipStat = fs.statSync(backupPath);
      // Staged files are no longer needed once the zip exists.
      fs.rmSync(tempRoot, { recursive: true, force: true });

      exportedRepos.push({
        owner: pair.owner,
        repo: pair.repo,
        filename: backupFilename,
        path: backupPath,
        size: zipStat.size,
        files: files.length
      });
      // Per-repo progress event (same channel as the per-file events).
      try {
        event.sender.send('folder-download-progress', {
          processed: i + 1,
          total: totalRepos,
          percent: Math.round(((i + 1) / Math.max(1, totalRepos)) * 100)
        });
      } catch (_) {}
    }

    return {
      ok: true,
      destination,
      repositories: exportedRepos,
      repositoryCount: exportedRepos.length,
      fileCount: exportedFiles
    };
  } catch (e) {
    console.error('export-gitea-projects-to-local error', e);
    return { ok: false, error: mapIpcError(e) };
  }
});
|
||||
|
||||
ipcMain.handle('getBranches', async (event, data) => {
|
||||
try {
|
||||
const branches = await getBranches(data.folder);
|
||||
|
||||
Reference in New Issue
Block a user