fix: chunked processing, concat sql statements with parameters

Samuel 2024-12-24 14:20:47 +01:00
parent b3e3eb7270
commit 55e33a87b1
5 changed files with 231 additions and 292 deletions
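Note: the statement-batching part of the fix lives in the other changed files, which this view does not show. As a rough, hypothetical sketch only, concatenating SQL statements while carrying their bound parameters alongside (instead of interpolating values into the SQL text) could look like the following; the statement shape and names here are assumptions, not the project's actual code.

// Hypothetical helper: join several parameterised statements into one SQL
// string and keep their bound values in a single ordered array.
function concatStatements(statements) {
  const sqlParts = [];
  const parameters = [];
  for (const { sql, params } of statements) {
    const trimmed = sql.trim();
    sqlParts.push(trimmed.endsWith(";") ? trimmed : trimmed + ";");
    parameters.push(...params); // order must match the ? placeholders
  }
  return { sql: sqlParts.join("\n"), parameters };
}

// Example (made-up statements):
const batch = concatStatements([
  { sql: "INSERT INTO messages (id, body) VALUES (?, ?)", params: [1, "hi"] },
  { sql: "INSERT INTO messages (id, body) VALUES (?, ?)", params: [2, "yo"] },
]);
// batch.sql        -> two INSERT statements on separate lines
// batch.parameters -> [1, "hi", 2, "yo"]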

test/.gitignore (vendored): 175 deletions

@@ -1,175 +0,0 @@
# Based on https://raw.githubusercontent.com/github/gitignore/main/Node.gitignore
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*
# Caches
.cache
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# Snowpack dependency directory (https://snowpack.dev/)
web_modules/
# TypeScript cache
*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional stylelint cache
.stylelintcache
# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variable files
.env
.env.development.local
.env.test.local
.env.production.local
.env.local
# parcel-bundler cache (https://parceljs.org/)
.parcel-cache
# Next.js build output
.next
out
# Nuxt.js build / generate output
.nuxt
# Gatsby files
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
.vuepress/dist
# vuepress v2.x temp and cache directory
.temp
# Docusaurus cache and generated files
.docusaurus
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# TernJS port file
.tern-port
# Stores VSCode versions used for testing VSCode extensions
.vscode-test
# yarn v2
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*
# IntelliJ based IDEs
.idea
# Finder (MacOS) folder config
.DS_Store

test/dist/index.js (vendored): 37 changed lines

@@ -29,7 +29,7 @@ export async function decryptBackup(file, passphrase, progressCallback) {
console.info(`${percent}% done`);
}
console.log(`Processing chunk at offset ${offset}`);
// console.log(`Processing chunk at offset ${offset}`);
const chunk = file.slice(offset, offset + chunkSize);
const arrayBuffer = await chunk.arrayBuffer();
const uint8Array = new Uint8Array(arrayBuffer);
@@ -47,19 +47,18 @@ export async function decryptBackup(file, passphrase, progressCallback) {
}
offset += chunkSize;
console.log(`Completed chunk, new offset: ${offset}`);
if (performance.memory) {
const memoryInfo = performance.memory;
console.log(`Total JS Heap Size: ${memoryInfo.totalJSHeapSize} bytes`);
console.log(`Used JS Heap Size: ${memoryInfo.usedJSHeapSize} bytes`);
console.log(`JS Heap Size Limit: ${memoryInfo.jsHeapSizeLimit} bytes`);
} else {
console.log("Memory information is not available in this environment.");
}
// console.log(`Completed chunk, new offset: ${offset}`);
// if (performance.memory) {
// const memoryInfo = performance.memory;
// console.log(`Total JS Heap Size: ${memoryInfo.totalJSHeapSize} bytes`);
// console.log(`Used JS Heap Size: ${memoryInfo.usedJSHeapSize} bytes`);
// console.log(`JS Heap Size Limit: ${memoryInfo.jsHeapSizeLimit} bytes`);
// } else {
// console.log("Memory information is not available in this environment.");
// }
}
console.log("All chunks processed, finishing up");
console.log(window.performance.measureUserAgentSpecificMemory());
// console.log("All chunks processed, finishing up");
return decryptor.finish();
} catch (e) {
console.error("Decryption failed:", e);
@@ -71,13 +70,15 @@ async function decrypt(file, passphrase) {
try {
const result = await decryptBackup(file, passphrase);
console.log("Database bytes length:", result.databaseBytes.length);
console.log("Preferences:", result.preferences);
console.log("Key values:", result.keyValues);
console.log(result, result.database_bytes);
// Example: Convert database bytes to SQL statements
const sqlStatements = new TextDecoder().decode(result.databaseBytes);
console.log("SQL statements:", sqlStatements);
// console.log("Database bytes length:", result.databaseBytes.length);
console.log(
"Database bytes as string (partly)",
new TextDecoder().decode(result.database_bytes.slice(0, 1024 * 50)),
);
// console.log("Preferences:", result.preferences);
// console.log("Key values:", result.keyValues);
} catch (error) {
console.error("Decryption failed:", error);
}