# -*-coding:utf-8-*-
"""Flask service exposing an emission-prediction endpoint backed by a LightGBM model.

All model artifacts are loaded once at import time; the route handler below
only reads these module-level objects.
"""

import os

# Hide all GPUs so downstream libraries run CPU-only.  This must be set
# BEFORE any CUDA-aware package is imported, which is why the model import
# appears further down rather than at the top with the other imports.
os.environ['CUDA_VISIBLE_DEVICES'] = '-1'

import json

from flask import Flask, request, make_response

from logzero import logger

# Resolve artifact paths relative to this file when running locally.
current_path = os.path.dirname(os.path.abspath(__file__))  # for local

# current_path = "/app"  # for docker

logger.info(f"{current_path}")

# Imported only after CUDA_VISIBLE_DEVICES is set (see note above).
from models.lgb_predict import load_config, load_history_data, load_lgb_model, predict

# Load the LightGBM model and its supporting artifacts once at startup:
# the serialized model, column config, historical feature data, and the
# emission-factor lookup table.  (Exact schemas live in models.lgb_predict —
# not visible from this file.)
lgb_model = load_lgb_model(model_path=f"{current_path}/model_files/hour_best_model.txt")

object_cols = load_config(f"{current_path}/config/object_cols.json")

history_data = load_history_data(data_path=f"{current_path}/data/data_sample.csv")

emission_factors = load_config(f"{current_path}/config/emission_factor.json")

app = Flask(__name__)
@app.route('/emission/', methods=["POST"])
def run_case_check():
    """Predict emissions for the posted payload.

    Expects a JSON body of the form ``{"data": ...}``.  Responds with a JSON
    envelope carrying an application-level status code:

    * ``{"code": 200, "data": <prediction>}`` on success
    * ``{"code": 406, "msg": ...}`` when the input is missing or empty

    The HTTP status is always 200 (existing contract — clients inspect the
    ``code`` field in the body, not the HTTP status).
    """
    resp_info = dict()
    # get_json(silent=True) returns None instead of raising when the body is
    # missing or not valid JSON, so a malformed request yields the 406
    # envelope rather than crashing the handler (the previous
    # `request.json.get(...)` raised on a non-JSON body).
    payload = request.get_json(silent=True)
    data = payload.get('data') if payload else None
    logger.info(data)
    # Truthiness covers both `None` and empty containers, and unlike
    # `len(data) != 0` it does not raise on non-sized values.
    if data:
        rst = predict(history_data, data, lgb_model, object_cols, emission_factors)
        resp_info["code"] = 200
        resp_info["data"] = rst
    else:
        resp_info["msg"] = "Input is None, please check !"
        resp_info["code"] = 406
    # ensure_ascii=False keeps any non-ASCII content in the prediction
    # readable instead of \uXXXX-escaped.
    resp = make_response(json.dumps(resp_info, ensure_ascii=False))
    # Previously missing: without this the response went out as text/html.
    resp.mimetype = "application/json"
    resp.status_code = 200
    return resp
if __name__ == '__main__':
    # Bind to all interfaces so the service is reachable from other hosts
    # and containers; debug mode stays off for production-like behavior.
    app.run(debug=False, host='0.0.0.0', port=8788)