A resilient webapp for executing queries against either MySQL or PostgreSQL databases, with a UI for writing queries and for giving detailed feedback on the errors they cause.
There is a toggle to enable or disable connection pooling for a query before it is sent to the database for execution.
Use of Pinata
The webapp sports a widget that accepts the gateway and JWT of a Pinata account and uses them to upload queries and responses to IPFS; the uploads then appear in the Files section of the Pinata account dashboard.
Demo
The app was built to run locally and has not been adapted to run on a public server. Here are screenshots of some UI flows in the app:
the result of pasting a database URL in the dialog
display of a few details queried on the database
UI of webapp as it awaits the result of a query applied to the database
successful execution of the query - the "show query" button shows the query response below
the result of the query after clicking on "show query"
display of the error due to malformed query
Providing details to the Pinata widget for uploading queries and results to the IPFS
display of JSON sent after a successful upload to Pinata
error sent by the server when retrieving uploads by their CID
My Code
The codebase is written as a hybrid of a plain Node.js server and Vercel serverless functions; it runs successfully in either environment.
A production-ready SQL UI for both MySQL and PostgreSQL databases with crash-proof error handling and support for uploading data to IPFS via Pinata
sqlUI_pinata
A production-ready SQL (MySQL or PostgreSQL) UI webapp that accepts and stores database connection strings and executes queries written by the user in a basic editor. Execution is crash-free: errors are shown intuitively to the user for correction or learning.
Databases can be switched on the fly by re-configuring the webapp, with support for connection pooling and for uploading SQL queries and responses to IPFS via Pinata.
neon-starter-kit.mp4
Features
An HTTP Node.js server with dynamic support for middlewares and routing to api/ endpoints
Dynamic switching between MySQL and PostgreSQL databases, configured by the user at any time, without mix-ups (a sketch of such a driver helper follows this list)
Compatibility with Vercel serverless environment
Custom configuration that gets persisted until changed
A user interface for complete interaction with the database
Option to enable pooled connections for database operations
Thoroughly tested against the database with queries, the server never crashes; it returns detailed errors instead
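To make the database switching and the pooling option concrete, here is a minimal sketch of what a driver-selecting helper could look like. The repository's actual db module is not reproduced in this post, so the structure, names, and return shapes below are assumptions rather than its real implementation.

/** Hypothetical ../db helper: resolves to an object whose query()
 *  uses mysql2 or pg depending on isMySQL, and pools only when db.pooled is set. */
let mysql = require('mysql2/promise'),
    { Pool, Client } = require('pg');

module.exports = async function ({ isMySQL, connectionString }) {
  let db = { pooled: false, conn: null };
  db.query = async sql => {
    if (!db.conn) {
      if (isMySQL)
        /** mysql2 pools and single connections expose the same promise-based query API */
        db.conn = db.pooled
          ? mysql.createPool(connectionString)
          : await mysql.createConnection(connectionString);
      else {
        /** pg: a Pool manages its own clients, a bare Client must connect explicitly */
        db.conn = db.pooled ? new Pool({ connectionString }) : new Client({ connectionString });
        if (!db.pooled) await db.conn.connect();
      }
    }
    let res = await db.conn.query(sql);
    return isMySQL ? res : res.rows; /** [rows, fields] for MySQL, rows for PostgreSQL */
  };
  return db;
};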
Problems
The documentation regarding the use of Pinata's API did not show, in code, exactly how to upload a file through it
The SDK method for uploading JSON, await pinata.upload.json({}), crashes with an unclear error about the absence of a File
The documentation for pinata.upload.file() uses the new File() constructor, which was not available in the Node.js runtime used here
Fixes
The code below, which is not shown in the docs, was arrived at after many errors and trials:
pinata.upload.file(new Blob([string_data], { type }))
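Put together, a self-contained sketch of the workaround looks roughly like the following; the credentials are placeholders and the exact SDK surface may differ between versions, so treat this as an illustration rather than the repository's code.

let { PinataSDK } = require('pinata');

let pinata = new PinataSDK({
  pinataJwt: process.env.PINATA_JWT,         /** placeholder credentials */
  pinataGateway: process.env.PINATA_GATEWAY,
});

async function uploadText(string_data) {
  /** wrap the string in a Blob since new File() was unavailable in this runtime */
  let blob = new Blob([string_data], { type: 'text/plain' });
  let result = await pinata.upload.file(blob); /** resolves with the upload details, including the CID */
  console.log(result);
  return result;
}

uploadText('select version()').catch(console.error);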
Source code
Pinata
An API endpoint defined in api/pinata.js handles uploading data to Pinata and retrieving it. It also writes the provided JWT and gateway strings to a sql_ui_config.json file.
let { PinataSDK } = require('pinata'),
    both = require('../js/both'),
    fs = require('fs'),
    path = require('path'),
    filePath = path.join(require('../rootDir')(__dirname), './sql_ui_config.json'),
    config = fs.existsSync(filePath) && require(filePath),
    pinata;

function upload(file, name, buffer, date) {
  date = new Date,
  buffer = Buffer.from(file, 'utf-8'),
  /** File object in similar fashion to the one present in browsers */
  // file = { buffer, name, type: 'text/plain', size: buffer.length, lastModified: +date, lastModifiedDate: date.toISOString() },
  file = new Blob([file], { type: 'text/plain' });
  return pinata.upload.file(file)
}

module.exports = async function (request, response) {
  let { data } = request.body || request.query;
  /** hardcoded string for splitting, both on the client and server */
  data = data.slice(data.indexOf('=') + 1).split('<_8c23#_>'),
  pinata = new PinataSDK({ pinataGateway: data[2], pinataJwt: data[3] });
  /** write the provided data into files */
  config && (config.PINATA_GATEWAY = data[2], config.PINATA_JWT = data[3]),
  config ||= { PINATA_GATEWAY: data[2], PINATA_JWT: data[3] },
  fs.writeFile(filePath, both.format(JSON.stringify(config)), _ => _)
  // pinata.testAuthentication().then()
  if (!data[4]) { // if a CID was not sent in data
    let res
    upload(data[0], data[1])
      .then(json => { console.log('::JSON::', res = json) })
      .catch(err => { console.log('::ERROR::', res = err) })
      .finally(_ => response.json(res))
  } else {
    let res;
    pinata.gateways.get(data[4])
      .then(file => console.log('::RETRIEVED::', res = file))
      .catch(error => console.log('::RETRIEVED::ERRORED::', res = error))
      .finally(_ => response.json(res))
  }
}
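For context, the handler above expects a single delimited string carrying the file contents, a name, the gateway, the JWT, and optionally a CID to retrieve. A hypothetical browser-side call matching that splitting logic might look like the snippet below; the real widget may assemble and encode the payload differently.

/** Hypothetical client-side call; field order mirrors data[0]..data[4] in the handler above. */
let payload = [
  'select version()',          /** file contents to upload */
  'query.txt',                 /** name */
  'example.mypinata.cloud',    /** Pinata gateway */
  '<JWT>',                     /** Pinata JWT */
  ''                           /** CID, left empty to upload instead of retrieve */
].join('<_8c23#_>');

fetch('/api/pinata', {
  method: 'POST',
  body: 'data=' + payload      /** the handler slices off everything up to the first '=' */
}).then(res => res.json())
  .then(json => console.log('upload result', json));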
Vercel
The server.js file is written so that it mimics the way Vercel receives and responds to requests at API endpoints.
let fs = require('fs');
/** write ENV variables to process.env if available */
fs.readFile('.env.development.local', (err, data) => {
  if (err) { /* console.error(err); */ return; }
  data.toString().replace(/\#[^\n]+\n/, '').split('\n').filter(e => e).forEach(el => {
    let { 0: key, 1: value } = el.split('=');
    process.env[key] = value.replace(/"/g, '');
    // console.log(process.env[key])
  })
});

let http = require('http'),
    path = require('path'),
    config = fs.existsSync('./config.json') && require('./config.json') || { PORT: process.env.PORT || 3000 },
    mime = require('mime-types'),
    jobs = {
      GET: function (req, res, parts, fxn) {
        /** middlewares that respond to GET requests are called here */
        fxn = fs.existsSync(fxn = '.' + parts.url + '.js') && require(fxn)
        if (parts.query) req.query = parts.params, fxn && fxn(req, res);
        return !!fxn;
      },
      POST: function (req, res, parts, fxn, bool) {
        fxn = fs.existsSync(fxn = '.' + parts.url + '.js') && require(fxn),
        req.on('data', function (data) {
          /** create req.body and res.json because the invoked module requires them to be defined */
          req.body = /\{|\}/.test(data = data.toString()) ? { data } : (parts = urlParts('?' + data)).params,
          fxn && fxn(req, res)
        });
        if (!fxn) res.end();
        /** decided to return true instead of !!fxn since POST requests are not expected to GET html resources */
        return !!fxn || bool;
      }
    },
    cache = {}; /** to store the strings of data read from files */

http.createServer((req, res, url, parts, data, verb) => {
  ({ url } = parts = urlParts(req.url)),
  /** data expected to be sent to the client, this approach does away with res.write and res.send in the jobs */
  res.json = obj => res.end(JSON.stringify(obj)), // for vercel functions
  data = jobs[verb = req.method](req, res, parts),
  url = url === '/' ? 'index.html' : url,
  /** the code below could be moved to a job but it is left here to prioritize it */
  data || new Promise((resolve, rej, cached) => {
    if (data) { resolve(/* dynamic data, exit */); return; }
    /* (cached = cache[req.url]) ? resolve(cached) : */
    fs.readFile(path.join('./', url), (err, buf) => {
      if (err) rej(err);
      else resolve(cache[req.url] = buf)
    })
  }).then(cached => {
    res.writeHead(200, {
      'Access-Control-Allow-Origin': '*',
      'Content-type': mime.lookup(url) || 'application/octet-stream'
    }),
    /** return dynamic data or static file that was read */
    // console.log("::PROMISE", [url]),
    res.end(cached)
  }).catch((err, str) => {
    str = '::ERROR:: ' + err,
    // console.error(str = '::ERROR:: ' + err, [url])
    res.end(str)
  })
}).listen(config.PORT, _ => { console.log(`Server listening on PORT ${config.PORT}`) })

function urlParts(url, params, query, is_html) {
  params = {}, query = '',
  url.replace(/\?[^]*/, e => ((query = e.replace('?', '')).split('&').forEach(e => params[(e = e.split('='))[0]] = decodeURIComponent(e[1])), '')),
  query &&= '?' + query,
  is_html = !/\.[^]+$/.test(is_html = (url = url.replace(query, '')).split('/').pop()) || /\.html$/.test(is_html);
  return { params, query: decodeURIComponent(query), url, is_html }
}
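Because the server attaches req.query, req.body, and res.json in the same shape Vercel provides, an api/ module only needs to export a (request, response) handler to run unchanged in both environments. A minimal hypothetical endpoint (not part of the repository) could look like this:

/** api/echo.js -- hypothetical endpoint; runs under server.js above or as a Vercel function */
module.exports = function (request, response) {
  /** GET requests arrive with request.query, POST requests with request.body */
  let { message } = request.body || request.query || {};
  response.json({ echoed: message || null });
};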
Backend for webapp
The API endpoint defined at api/accountData.js handles configuring the webapp, persisting the provided data to sql_ui_config.json, and responding to queries.
let data = {},
    fs = require('fs'),
    path = require('path'),
    file = path.join(require('../rootDir')(__dirname), './sql_ui_config.json'),
    both = require('../js/both'),
    dB,
    stringErr = (err, cause, fix) => [
      `"${err.message}"<center>----------</center>${cause}, code: ${err.code} with severity: ${err.severity || '<N/A>'} for sqlState: \`${err.sqlState || '<N/A>'}\`, at position \`${err.position || '<N/A>'}\` for operation \`${err.routine || '<N/A>'}\``,
      fix
    ],
    isMySQL;

module.exports = function (request, response) {
  let { pooled, query, setup } = request.body || request.query, /** to accommodate get or post requests via this server or Vercel serverless */
      config = fs.existsSync(file) && require(file),
      stored = (config || {}).CONNECTION_STRING;
  data.result = '', data.errors = [];
  // console.log('::SETUP::', [setup, config, __dirname]);
  if (setup || stored) {
    config && (config.CONNECTION_STRING = setup || stored || ''),
    config ||= { CONNECTION_STRING: setup || stored },
    // console.log('::SETUP::', setup, config, file),
    fs.writeFileSync(file, both.format(JSON.stringify(config)));
    if (!query) dB = null;
    /** absent query and present setup implies re-configuration of the app
        with a different (enforced by the client) connection string, which requires reconnecting the database
        driver with the new URL */
    isMySQL = /^mysql/.test(setup || stored),
    data.configured = setup != stored
      ? setup || stored /** send the stored db string the very first time to synchronize with the client */
      : /* a truthy instead of the stored db string for security */ 1,
    /** setting dB = null destroys the closure below due to ||=, in order to update connectionString: setup after errors or re-configurations */
    setup && (dB ||= require('../db')({ isMySQL, connectionString: setup }))
      /** avoid requiring the module and invoking its exported function from scratch until either set to null or destroyed in serverless functions */
      .then(operate)
      .catch(err => {
        data.errors[0] = stringErr(err,
          'Cause: connection string provided for configuration contains a nonentity',
          'Re-configure app and ensure that the provided database URL resolves to an actual database'),
        dB = null,
        response.json(data)
      });
    if (stored && !setup) {
      /** provide pinata configs in the sent data if available, as a UI gimmick that fills them in on the client */
      let res = { configured: setup || stored };
      ['JWT', 'GATEWAY'].forEach((env, value) => {
        (value = config['PINATA_' + env]) && (res[env.toLowerCase()] = value)
      }),
      response.json(res)
    }
  } else return response.json({ configured: 0 });

  /* section that actually applies custom queries to the database */
  function operate(db) {
    db.pooled = pooled;
    if (query = query.replace(/\++/g, '\t'))
      db.query(query)
        .then(arr => { data.result = isMySQL ? arr[0] : arr })
        .catch(err => data.errors[0] = stringErr(err,
          `Query: \`${query.split(/\s/).shift()}\``,
          'Write syntactically correct queries and only specify fields or tables present in the database or operations supported by the provider'))
        .finally(_ => response.json(data));
    else {
      let count = 0; /** used outside of the loop because the promisified nature makes the index i unreliable */
      /** added condition to avoid errors from reading non-existent functions or fields */
      ['version'].concat(isMySQL ? [] : ['inet_server_addr', 'inet_server_port']).forEach((query, i, a) => {
        db.query(`select ${query}()`)
          .then(arr => { data[query.replace('inet_', '')] = (arr = arr.flat())[0][query] || arr[0][query + '()'] })
          .catch(err => data.errors[0] = ['::DATABASE CONNECTION:: ' + (/* data.version = */ err.message),
            'Connect to the internet and remove typos in the environment variables for connecting the database'])
          .finally(_ => {
            if (!a[++count]) data.version = "VERSION • " + data.version, response.json(data);
          })
      })
    }
  }
}
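As a usage illustration, the endpoint can be exercised with the same parameters the UI sends (setup, query, and pooled). The calls below are a sketch with a placeholder connection string; they assume GET query parameters, which the handler accepts alongside POST bodies.

/** 1) Configure the app; an empty query triggers the version/server-details probe */
fetch('/api/accountData?setup=' + encodeURIComponent('postgres://user:pass@host:5432/dbname') + '&query=')
  .then(res => res.json())
  .then(json => console.log(json)); /** e.g. { configured: ..., version: 'VERSION • ...', errors: [] } */

/** 2) Run a query against the configured database with pooling enabled */
fetch('/api/accountData?setup=' + encodeURIComponent('postgres://user:pass@host:5432/dbname')
    + '&query=' + encodeURIComponent('select * from pg_tables limit 5') + '&pooled=1')
  .then(res => res.json())
  .then(json => console.log(json.result, json.errors));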
*/isMySQL=/^mysql/.test(setup||stored),data.configured=setup!=stored?setup||stored/**sent the stored db string the very first time to synchronize with client */:/*a truthy instead of stored db string for security*/1,/** settiing dB = null destroys the closure below due to ||=, in order to update connectionString:setup after errors or re-configurations */setup&&(dB||=require('../db')({isMySQL,connectionString:setup}))/** avoid requring module and invoking its exported function from scratch until either set to null or destroyed in serverless functions*/.then(operate).catch(err=>{data.errors[0]=stringErr(err,'Cause: connection string provided for configuration contains a nonentity','Re-configure app and ensure that the provided database URL resolves to an actual database'),dB=null,response.json(data)});if(stored&&!setup){/** provide pinata configs to sent data if available as UI gimmick that fills them there in the client */letres={configured:setup||stored};['JWT','GATEWAY'].forEach((env,value)=>{(value=config['PINATA_'+env])&&(res[env.toLowerCase()]=value)}),response.json(res)}}elsereturnresponse.json({configured:0});/* section that actually applies custom queries to the database */functionoperate(db){db.pooled=pooled;if(query=query.replace(/\++/g,'\t'))db.query(query).then(arr=>{data.result=isMySQL?arr[0]:arr}).catch(err=>data.errors[0]=stringErr(err,`Query: \`${query.split(/\s/).shift()}\``,'Write syntactically correct queries and only specify fields or tables present in the the database or operations supported by the provider')).finally(_=>response.json(data));else{letcount=0;/**used outside of loop cause the promsified nature makes the index i unreliable *//** added condition to avoid errors from reading non-existent functions or fields*/['version'].concat(isMySQL?[]:['inet_server_addr','inet_server_port']).forEach((query,i,a)=>{db.query(`select ${query}()`).then(arr=>{data[query.replace('inet_','')]=(arr=arr.flat())[0][query]||arr[0][query+'()']}).catch(err=>data.errors[0]=['::DATABASE CONNECTION:: '+(/*data.version=*/err.message),'Connect to the internet and remove typos in the environment variables for connecting the database']).finally(_=>{if(!a[++count])data.version="VERSION • "+data.version,response.json(data);})})}}}