# datasetController.py
from fastapi import Request
from fastapi.routing import APIRouter
from globals import *  # expected to provide SESSION and POSTGRESQL
import copy
import json
import BD as bd
import pandas as pd

router = APIRouter(prefix="/datasets")
@router.post("/saveDataset")
async def saveDataset(request: Request):
global SESSION
data = await request.json()
id = data['UUID']
data = data["dataset"]
name = data["name"]
description = data["description"]
integrationType = data["integrationType"]
datesRange = data["datesRange"]
if id in SESSION and "integrateData" in SESSION[id]:
datasetData = copy.deepcopy(SESSION[id]['integrateData'])
datasetData['date'] = datasetData.index.astype(str)
datasetData = datasetData.to_dict('list')
jsonData = json.dumps(datasetData, ensure_ascii=False)
else:
jsonData = json.dumps(data["data"], ensure_ascii=False)
if(POSTGRESQL == True):
bd.execSQL("create table if not exists DATASET (idDataset SERIAL primary key, name varchar(255) default null, description varchar(255) default null, integrationType varchar(255) default null, datesRange varchar(255) default null, data JSON DEFAULT NULL)")
else:
bd.execSQL("create table if not exists DATASET (idDataset int primary key AUTO_INCREMENT, name varchar(255) default null, description varchar(255) default null, integrationType varchar(255) default null, datesRange varchar(255) default null, data JSON DEFAULT NULL) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci")
bd.execSQL("insert into DATASET (name, description, integrationType, datesRange, data) values('" +
name+"','"+description+"','"+integrationType+"','"+datesRange+"','"+jsonData+"')")
return {
"status": True,
}
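
# Example request body for /datasets/saveDataset (illustrative only, inferred
# from the fields read above; the exact payload produced by the frontend may
# differ):
#
#   {
#       "UUID": "<session key>",
#       "dataset": {
#           "name": "sales",
#           "description": "monthly sales figures",
#           "integrationType": "csv",
#           "datesRange": "2020-01-01 - 2020-12-31",
#           "data": {"date": ["2020-01-01"], "value": [42]}
#       }
#   }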
@router.post("/getDataset")
async def getDataset(request: Request):
data = await request.json()
id = data['id']
result = bd.selectFromSQL("select name, data from DATASET where idDataset="+str(id))[0]
return {
"status": True,
"data": result['data'],
"name": result['name']
}
@router.post("/getDatasetInfo")
async def getDatasetInfo(request: Request):
data = await request.json()
id = data['id']
result = bd.selectFromSQL("select data from DATASET where idDataset="+str(id))[0]['data']
try:
df = pd.DataFrame(json.loads(result))
except:
df = pd.DataFrame(result)
return {
"status": True,
"data": df.describe()
}
@router.post("/getDatasets")
def getDatasets():
if(POSTGRESQL == True):
bd.execSQL("create table if not exists DATASET (idDataset SERIAL primary key, name varchar(255) default null, description varchar(255) default null, integrationType varchar(255) default null, datesRange varchar(255) default null, data JSON DEFAULT NULL)")
else:
bd.execSQL("create table if not exists DATASET (idDataset int primary key AUTO_INCREMENT, name varchar(255) default null, description varchar(255) default null, integrationType varchar(255) default null, datesRange varchar(255) default null, data JSON DEFAULT NULL) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci")
result = bd.selectFromSQL('select iddataset as "idDataset", name, description, integrationtype as "integrationType", datesrange as "datesRange" from DATASET')
return {
"status": True,
"data": result
}
@router.post("/deleteDataset")
async def deleteDataset(request: Request):
data = await request.json()
id = data["id"]
bd.execSQL("delete from DATASET where idDataset="+str(id))
bd.execSQL("delete from EXECUTION where dataset="+str(id))
return {
"status": True,
}
@router.post("/deleteAllDatasets")
def deleteAllDatasets():
bd.execSQL("delete from DATASET")
bd.execSQL("delete from EXECUTION")
return {
"status": True,
}
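
# Usage sketch (an assumption, not part of the original file): this router is
# presumably mounted on the main FastAPI application, e.g.
#
#   from fastapi import FastAPI
#   app = FastAPI()
#   app.include_router(router)
#
# after which clients POST JSON bodies to /datasets/saveDataset,
# /datasets/getDataset, /datasets/getDatasetInfo, /datasets/getDatasets,
# /datasets/deleteDataset and /datasets/deleteAllDatasets.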