Properly remove archival system and add a replacement system for uploading data

Branch: master
Author: sigonasr2, 4 years ago
Parent: 8c37c8ccc4
Commit: 565b56888d
  1. ngsplanner_seed.sql (2 changes)
  2. server.js (82 changes)

@@ -92,7 +92,7 @@ insert into weapon(name,rarity,level_req,atk,potential_id,variance,base_affix_slots,drop_info,pb_gauge_build,icon)
insert into weapon(name,rarity,level_req,atk,potential_id,variance,base_affix_slots,drop_info,pb_gauge_build,icon)
values('Tzvia',2,4,195,(select id from potential where name='Indomitable Unit' limit 1),0.7,2,'Central City Item Shop, Common Drop',0,'/icons/uc1iBck.png');
insert into weapon(name,rarity,level_req,atk,potential_id,variance,base_affix_slots,drop_info,pb_gauge_build,icon)
-values('Primm',1,1,200,(select id from potential where name='Recycler Unit' limit 1),0.7,2,'Central City Item Shop, Common Drop',0,'/icons/uc1iBck.png');
+values('Cattleya',1,1,200,(select id from potential where name='Recycler Unit' limit 1),0.7,2,'Central City Item Shop, Common Drop',0,'/icons/uc1iBck.png');
insert into potential_data(potential_id,name,level,mel_dmg,rng_dmg,tec_dmg,crit_rate,crit_dmg,pp_cost_reduction,active_pp_recovery,natural_pp_recovery,dmg_res,all_down_res,burn_res,freeze_res,blind_res,shock_res,panic_res,poison_res,battle_power_value,pb_gauge_build)
values((select id from potential where name='Recycler Unit' limit 1),'Recycler Unit Lv.1',1,1.18,1.18,1.18,0,0,0,0,0,0,0,0,0,0,0,0,0,10,0);

@@ -211,7 +211,7 @@ const ENDPOINTDATA=[
]
app.get("/databases",(req,res)=>{
-db.query('select * from pg_database where datname like \'ngsplanner%\' order by datname limit 100')
+db.query('select * from pg_database where datname like \'ngsplanner%\' order by datname desc limit 100')
.then((data)=>{
res.status(200).json(data.rows)
})
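Backup databases are named ngsplanner plus a YYYYMMDDHHMMSS timestamp, so sorting datname descending lists the newest backup first (lexicographic order on these names matches chronological order). A quick way to check the listing, assuming the server listens on localhost:3000 (hypothetical here):

// hypothetical host/port; returns the ngsplanner* databases, newest timestamped backup first
const dbs = await (await fetch('http://localhost:3000/databases')).json()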
@@ -220,6 +220,42 @@ app.get("/databases",(req,res)=>{
})
})
app.post("/databases/restorefrombackup",(req,res)=>{
if (req.body.database) {
db3.query('select * from pg_database where datname=$1',[req.body.database])
.then((data)=>{
if (data.rows.length>0) {
db.end(()=>{})
return db3.query('select pg_terminate_backend (pid) from pg_stat_activity where pg_stat_activity.datname=\'ngsplanner\'')
} else {
throw "Could not find requested database "+req.body.database
}
})
.then(()=>{
return db3.query('drop database ngsplanner')
})
.then(()=>{
return db3.query('create database ngsplanner with template '+req.body.database)
})
.then(()=>{
db = new Pool({
user: 'postgres',
password: '',
host: 'postgres',
database: 'ngsplanner',
port: 5432,
})
res.status(200).send("Done!")
})
.catch((err)=>{
console.log(err.message)
res.status(500).send(err.message)
})
} else {
res.status(500).send("Invalid data!")
}
})
app.post("/databases/testtolive",(req,res)=>{
db.end(()=>{})
db2.end(()=>{})
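The new restorefrombackup endpoint above terminates open connections, drops the live ngsplanner database, and recreates it from the chosen backup via Postgres's create database ... with template, then reopens the pool. Note that the requested name must first match an existing datname in the parameterized pg_database lookup, so arbitrary strings never reach the string-concatenated create database statement. A sketch of a restore call, assuming the server listens on localhost:3000 and a backup named ngsplanner20210815120000 exists (both hypothetical):

// hypothetical host/port and backup name; replaces the live database with that backup
await fetch('http://localhost:3000/databases/restorefrombackup',{
method:'POST',
headers:{'Content-Type':'application/json'},
body:JSON.stringify({database:'ngsplanner20210815120000'})
})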
@@ -276,7 +312,7 @@ app.post("/databases/livetotest",(req,res)=>{
password: '',
host: 'postgres',
database: 'ngsplanner2',
-port: 5432,
+port: 5432,
})
res.status(200).send("Done!")
})
@@ -288,7 +324,10 @@ app.post("/databases/livetotest",(req,res)=>{
app.post("/databases/backup",(req,res)=>{
db.end(()=>{})
var date = new Date()
-db3.query('create database ngsplanner'+String(date.getFullYear()).padStart(4,'0')+String(date.getMonth()).padStart(2,'0')+String(date.getDate()).padStart(2,'0')+String(date.getHours()).padStart(2,'0')+String(date.getMinutes()).padStart(2,'0')+String(date.getSeconds()).padStart(2,'0')+' with template ngsplanner')
+db3.query('select pg_terminate_backend (pid) from pg_stat_activity where pg_stat_activity.datname=\'ngsplanner\'')
+.then(()=>{
+return db3.query('create database ngsplanner'+String(date.getFullYear()).padStart(4,'0')+String(date.getMonth()).padStart(2,'0')+String(date.getDate()).padStart(2,'0')+String(date.getHours()).padStart(2,'0')+String(date.getMinutes()).padStart(2,'0')+String(date.getSeconds()).padStart(2,'0')+' with template ngsplanner')
+})
.then(()=>{
db = new Pool({
user: 'postgres',
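One caveat in the backup naming: JavaScript's Date#getMonth() is zero-based (January is 0), so the generated names embed months 00-11 rather than 01-12. They still sort chronologically, but if calendar-accurate names are wanted, the month component would need a +1, along the lines of:

// sketch only: zero-based month shifted to the usual 01-12 range
String(date.getMonth()+1).padStart(2,'0')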
@@ -326,8 +365,7 @@ function CreateDynamicEndpoints() {
}
})
app.post("/"+endpoint.endpoint,(req,res)=>{
app.post("/"+endpoint.endpoint,async(req,res)=>{
var allExist=true
endpoint.requiredfields.forEach((field)=>{
@@ -343,14 +381,30 @@ function CreateDynamicEndpoints() {
var combinedfields = [...endpoint.requiredfields,...endpoint.optionalfields,...endpoint.excludedfields]
//console.log(combinedfields)
var all_filled_fields=combinedfields.filter((field)=>(field in req.body))
-db.query('insert into '+endpoint.endpoint+"("+all_filled_fields.join(',')+") values("+all_filled_fields.map((field,i)=>"$"+(i+1)).join(",")+") returning *",all_filled_fields.map((field)=>req.body[field]))
-.then((data)=>{
-res.status(200).json(data.rows)
-})
-.catch((err)=>{
-res.status(500).send(err.message)
-})
+var requiresInsert=true
+if (endpoint.requiredfields.includes("name")) {
+await db.query('update '+endpoint.endpoint+' set '+all_filled_fields.map((field,i)=>field+"=$"+(i+1)).join(",")+' where name=$'+(all_filled_fields.length+1)+' returning *',[...all_filled_fields.map((field)=>req.body[field]),req.body["name"]])
+.then((data)=>{
+if (data.rows.length===0) {
+requiresInsert=true
+} else {
+requiresInsert=false
+res.status(200).json(data.rows)
+}
+})
+.catch((err)=>{
+res.status(500).send(err.message)
+})
+}
+if (requiresInsert) {
+db.query('insert into '+endpoint.endpoint+"("+all_filled_fields.join(',')+") values("+all_filled_fields.map((field,i)=>"$"+(i+1)).join(",")+") returning *",all_filled_fields.map((field)=>req.body[field]))
+.then((data)=>{
+res.status(200).json(data.rows)
+})
+.catch((err)=>{
+res.status(500).send(err.message)
+})
+}
})
app.patch("/"+endpoint.endpoint,(req,res)=>{
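This turns the generated POST handlers into a manual upsert: rows keyed by name are updated in place, and the insert only runs when no row matched (or the endpoint has no name field). One hedged observation: if the update query itself fails, the catch sends a 500 but requiresInsert stays true, so the insert branch still runs and may attempt a second response. A minimal guard inside the update's catch, assuming the handler shown above:

.catch((err)=>{
requiresInsert=false // hypothetical guard: keep the insert branch from double-responding
res.status(500).send(err.message)
})

Postgres's native insert ... on conflict do update would collapse the two statements into one, though it requires a unique constraint on name.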
@@ -481,7 +535,7 @@ app.get('/data',async(req,res)=>{
var promises = []
for (var endpoint of ENDPOINTDATA) {
if (endpoint.requiredfields.includes("name")) {
-await db.query('select distinct on (name) name,* from '+endpoint.endpoint+' order by name,id desc')
+await db.query('select * from (select distinct on (name) name,* from '+endpoint.endpoint+' order by name,id desc)t order by id asc')
.then((data)=>{
finalresult[endpoint.endpoint]={}
data.rows.forEach((val)=>{finalresult[endpoint.endpoint][val.name]=val})
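Postgres requires the order by of a distinct on (name) query to start with name, so the old query returned rows alphabetically. Wrapping it in a subquery keeps the latest-row-per-name semantics while re-sorting the final result by id ascending. The equivalent standalone SQL, with weapon standing in for the endpoint's table (table name hypothetical here):

select * from (
select distinct on (name) name,* from weapon order by name,id desc
)t order by id asc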
