Commit 1c01d4f6 authored by Theis

Merge remote-tracking branch 'origin/master'

parents e5bca74b e2988755
Pipeline #100675 passed with stage in 4 minutes and 8 seconds
@@ -70,4 +70,4 @@ def main():
     print(f"Processed data dictionary: {pt.PROCESSED_DATA_DIR}\n")

 if __name__ == "__main__":
-    main()
\ No newline at end of file
+    main()
@@ -11,4 +11,4 @@ setup(
     author='Christian Marius Lillelund',
     author_email='cl@ece.au.dk',
     license='MIT',
-)
\ No newline at end of file
+)
@@ -70,4 +70,4 @@ def main():
     writer.writerow(data)

 if __name__ == '__main__':
-    main()
\ No newline at end of file
+    main()
@@ -260,4 +260,4 @@ def main():
     plt.show()

 if __name__ == "__main__":
-    main()
\ No newline at end of file
+    main()
@@ -136,4 +136,4 @@ def main():
     writer.writerow(data)

 if __name__ == '__main__':
-    main()
\ No newline at end of file
+    main()
@@ -8,8 +8,8 @@ import paths as pt
 from tools import data_loader
 from utility.config import load_config
 from utility.data import write_csv
-from sksurv.ensemble import GradientBoostingSurvivalAnalysis
 from sklearn.model_selection import KFold
+from sksurv.ensemble import GradientBoostingSurvivalAnalysis
 from sksurv.metrics import (concordance_index_censored,
                             concordance_index_ipcw,
                             integrated_brier_score)
@@ -113,4 +113,4 @@ def main():
                 bbox_inches = "tight")

 if __name__ == "__main__":
-    main()
\ No newline at end of file
+    main()
AIR ML API
==============================
The API exposes the ML models through an HTTP REST API.
Usage
==============================
The documentation is accessible through Swagger at http://127.0.0.1:8000/docs when hosted locally.
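As a minimal usage sketch (assuming the API is running locally on port 8000; the root endpoint requires no token, whereas the protected endpoints expect a JWT bearer token):

```python
import requests

# Query the root endpoint of a locally hosted instance (port 8000 assumed).
response = requests.get("http://127.0.0.1:8000/")
response.raise_for_status()
print(response.json())  # expected: {'message': 'AIR API v. 0.1'}
```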
To deploy the ML API to Azure, first build the Docker image and push it to our container registry on Azure. On Azure, an App Service automatically fetches the latest image and deploys it. The following PowerShell commands authenticate to Azure, connect to the registry, build the image, and push it:
$ Connect-AzAccount
$ Connect-AzContainerRegistry -Name aircontainerregistry
$ docker build -t aircontainerregistry.azurecr.io/airapi .
$ docker push aircontainerregistry.azurecr.io/airapi
Contact
==============================
Please contact [Christian Marius Lillelund](mailto:cl@ece.au.dk) or [Christian Fischer Pedersen](mailto:cfp@ece.au.dk) for questions regarding this repository.
@@ -35,4 +35,4 @@ class JWTBearer(HTTPBearer):
             payload = None
         if payload:
             is_token_valid = True
-        return is_token_valid
\ No newline at end of file
+        return is_token_valid
@@ -11,15 +11,16 @@ import csv
 import joblib
 import yaml
 import pandas as pd
+import numpy as np
 from typing import Optional
 from fastapi import Depends, FastAPI, HTTPException, Request
 from fastapi.responses import JSONResponse
 from fastapi_jwt_auth import AuthJWT
 from fastapi_jwt_auth.exceptions import AuthJWTException
 from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
 from fastapi import Request, HTTPException
 from sksurv.ensemble import GradientBoostingSurvivalAnalysis
 from xgboost import XGBClassifier
 from decimal import Decimal
 app = FastAPI(title='AIR API', version='1.0',
               description='An API that classifies citizens based on data')
@@ -88,7 +89,7 @@ class TrainingOutputData(pydantic.BaseModel):

 @app.get('/')
 def index():
-    return {'message': f'AIR API v. 0.1'}
+    return {'message': 'AIR API v. 0.1'}

 @app.get('/user', dependencies=[Depends(JWTBearer())], tags=["login"])
 def user(Authorize: AuthJWT = Depends()):
@@ -139,10 +140,12 @@ def predict_alarm(incoming_data: InputData):
     df_for_alarm = add_embedding(df.copy(), 'alarm', ats_resolution)
     surv_func = alarm_model.predict_survival_function(df_for_alarm)

-    event_times = [int(x) for x in surv_func[0].x]
-    surv_probs = [float(x) for x in surv_func[0].y]
-    prob_after_one_year = 1 - surv_probs[365*5]
+    event_times = list(range(1, 13))
+    surv_probs = np.array([float(x) for x in surv_func[0].y][:360])
+    surv_probs = np.mean(surv_probs.reshape(-1, 30), axis=1)
+    surv_probs = list(map((lambda x: float(round(100*Decimal(x), 1))), surv_probs))
+    prob_after_one_year = 100 - surv_probs[-1]

     alarm_arguments = generate_alarm_arguments(df, ats_resolution, prob_after_one_year)

     return {
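For orientation, the new monthly aggregation above can be sketched in isolation (a minimal, hypothetical example with a synthetic daily survival curve; only the reshaping, rounding, and one-year probability mirror the change):

```python
import numpy as np

# Synthetic daily survival probabilities for one citizen (360 days assumed, values made up).
daily_surv_probs = np.linspace(1.0, 0.6, 360)

# Average 30-day windows -> 12 monthly survival probabilities.
monthly = daily_surv_probs.reshape(-1, 30).mean(axis=1)

# Express as percentages rounded to one decimal place.
monthly_pct = [round(float(100 * p), 1) for p in monthly]

# Probability of receiving an alarm within one year.
prob_after_one_year = 100 - monthly_pct[-1]
print(monthly_pct, prob_after_one_year)
```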
@@ -159,8 +162,8 @@ def predict_training(incoming_data: InputData):
     data = validate_data(incoming_data)
     df = prepare_data(data, ats_resolution)

-    complete_model = read_xgb_model(f'complete_xgboost.joblib')
-    compliance_model = read_xgb_model(f'compliance_xgboost.joblib')
+    complete_model = read_xgb_model('complete_xgboost.joblib')
+    compliance_model = read_xgb_model('compliance_xgboost.joblib')

     df_for_complete = add_embedding(df.copy(), 'complete', ats_resolution)
     df_for_compliance = add_embedding(df.copy(), 'compliance', ats_resolution)
@@ -264,7 +267,7 @@ def generate_alarm_arguments(df: pd.DataFrame,
     arguments.append(loan_period_argument)
     arguments.append("får efter et år en nødalarm")
-    arguments.append(f"med {round(prob_after_one_year*100, 1)}% sandsynlighed")
+    arguments.append(f"med {round(prob_after_one_year, 1)}% sandsynlighed")
     return arguments

 def load_settings(file_name):
@@ -68,4 +68,4 @@ def main():
     write_csv(df, file_path, file_name)

 if __name__ == "__main__":
-    main()
\ No newline at end of file
+    main()
@@ -114,4 +114,4 @@ def main():
     write_csv(df, file_path, file_name)

 if __name__ == "__main__":
-    main()
\ No newline at end of file
+    main()
@@ -58,4 +58,4 @@ def main():
     write_csv(df, file_path, file_name)

 if __name__ == "__main__":
-    main()
\ No newline at end of file
+    main()