Commit ad125d17 authored by Faizan Azim's avatar Faizan Azim 🤓

added limited gitrepo support

parents
DB_URI=sqlite:///./ilens_versions.db
CONFIG_MAP_VARIABLES=MONGO_URI
HELM_REPO = https://gitlab-pm.knowledgelens.com/faizan.azim/utest
GIT_USERNAME = faizan.azim
# SECURITY: a live personal access token was committed on this line — revoke it
# in GitLab and supply GIT_TOKEN via the environment / a secret store instead.
GIT_TOKEN =
\ No newline at end of file
__pycache__
.idea
.vscode
# Application version string, surfaced by main.py's FastAPIConfig in the API docs.
version = "V6.6"
if __name__ == "__main__":
    # Load .env before importing anything that reads os.environ at import
    # time (scripts.config resolves $DB_URI / $LOG_LEVEL while importing).
    from dotenv import load_dotenv

    load_dotenv()

    import argparse
    import logging

    import uvicorn

    from main import app
    from scripts.config import Service

    # NOTE(review): FastAPI's root_path defaults to ""; forcing it to None is
    # unusual — kept as-is, confirm against the reverse-proxy setup.
    app.root_path = None

    # The original file nested a second, redundant `if __name__ == "__main__":`
    # around the argument definitions; one guard is enough.
    ap = argparse.ArgumentParser()
    ap.add_argument(
        "--port",
        "-p",
        required=False,
        default=Service.port,
        help="Port to start the application.",
    )
    ap.add_argument(
        "--bind",
        "-b",
        required=False,
        default=Service.host,
        help="IP to start the application.",
    )
    arguments = vars(ap.parse_args())
    logging.info(f"App Starting at {arguments['bind']}:{arguments['port']}")
    uvicorn.run("main:app", host=arguments["bind"], port=int(arguments["port"]))
[SERVICE]
port = 3973
host = 0.0.0.0
[SQL_DB]
uri = $DB_URI
[LOGGING]
level=$LOG_LEVEL
traceback=true
\ No newline at end of file
{
"git_modules": [
{
"git_link": "https://gitlab-pm.knowledgelens.com/KnowledgeLens/Products/iLens-2.0/core/server/ilens-assistant/workflow-management.git",
"branch": "master",
"command": [],
"enableMountVolume": true,
"module_port": 45678,
"node_port": 98765
},
{
"git_link": "https://gitlab-pm.knowledgelens.com/KnowledgeLens/Products/iLens-2.0/core/server/ilens-assistant/form-management.git",
"branch": "master",
"command": [],
"enableMountVolume": true,
"module_port": 45678,
"node_port": 98765
},
{
"git_link": "https://gitlab-pm.knowledgelens.com/KnowledgeLens/Products/iLens-2.0/core/server/ebpr-report-engine.git",
"branch": "master",
"command": [],
"enableMountVolume": true,
"module_port": 45678,
"node_port": 98765
},
{
"git_link": "https://gitlab-pm.knowledgelens.com/KnowledgeLens/Products/iLens-2.0/core/server/ebpr-report-engine.git",
"branch": "master",
"command": [
"celery",
"-A",
"worker",
"worker",
"--loglevel=INFO",
"-Q",
"ebpr_reports",
"-c",
"2"
],
"enableMountVolume": true
}
]
}
\ No newline at end of file
name: <{ module_name }>
codeType: backend
type: core
affinity:
enabled: True
values: [ "klblrserv24","klblrserv23" ]
podAutoScaler:
enabled: False
type: "hpa"
maxReplicaCount: 3
minReplicaCount: 1
scalePercentage: 85
deployment:
imageName: <{ image_tag }>
command: <{ command }>
PullSecrets: <{ PullSecrets | default("ilens-azregistry") }>
PullPolicy: IfNotPresent <{ branch }>
resources:
requests:
memory: <{ request_memory | default("250Mi") }>
cpu: <{ request_cpu | default("250Mi") }>
limits:
memory: <{ limit_memory | default("750Mi") }>
cpu: <{ limit_cpu | default("500Mi") }>
environmentVar:
- name: MONGO_URI
valueFrom:
secretKeyRef:
name: mongo-uri
key: MONGO_URI
{% for i in variables %}
{% if 'name' in i and 'value' in i %}
    - name: '<{ i.name }>'
      value: '<{ i.value }>'
{% endif %}
{% endfor %}
mountVolume:
enabled: <{ enableMountVolume }>
volumeMounts:
- name: <{ mount_name | default("core-volumes") }>
mountPath: "/code/data"
volumes:
- name: <{ mount_name | default("core-volumes") }>
persistentVolumeClaim:
claimName: <{ claim_name | default("core-volumes") }>
\ No newline at end of file
# Load .env only when executed as a script so that importing this module
# elsewhere does not implicitly mutate os.environ.
if __name__ == "__main__":
    from dotenv import load_dotenv
    load_dotenv()

import argparse
import json
import logging
import os
import shutil
import sys
import git
import gitlab
import jinja2
import ruamel.yaml
from scripts.core import ILensVersionHandler
from scripts.db.psql.databases import get_db_for_func
from scripts.schemas import GetRequest

# SECURITY(review): working-looking credentials are hard-coded as fallbacks
# below. Revoke them in GitLab and make GIT_USERNAME/GIT_TOKEN mandatory
# environment variables instead of shipping defaults.
git_user_name = os.environ.get("GIT_USERNAME", default="harshavardhan.c")
git_access_token = os.environ.get("GIT_TOKEN", default="FEMA6PnP63fJCs6DrtZJ")
# Comma-separated names of env vars expected to come from a ConfigMap/Secret.
config_variables = os.environ.get("CONFIG_MAP_VARIABLES", default="").split(",")
helm_repo = os.environ.get("HELM_REPO", default="")
# Path inside the helm repo where rendered charts are committed.
HELM_PATH = "/ilens-core/ilens-modules"
# Local staging directory for rendered charts before they are pushed.
HELM_STORE_PATH = "./helm-charts"
def render_helm_chart(helm_name, data_dict, variables_list, helm_template_file):
    """Render a Jinja2 helm template for one module into ./helm-charts/<name>.yaml.

    :param helm_name: module name; used as template variable and output filename
    :param data_dict: per-module values (image_tag, branch, command, ...) for the template
    :param variables_list: environmentVar entries passed to the template as `variables`
    :param helm_template_file: template filename looked up under ./templates

    Errors are logged and swallowed so one bad module does not abort the batch.
    """
    try:
        output_path = "helm-charts"
        # exist_ok avoids the check-then-create race of the old exists()/makedirs pair.
        os.makedirs(output_path, exist_ok=True)
        helm_path = os.path.join(output_path, f'{helm_name}.yaml')
        environment = jinja2.Environment(
            loader=jinja2.FileSystemLoader(searchpath='./templates'),
            trim_blocks=True,
            # Helm itself uses {{ }}, so these project templates use <{ }>.
            variable_start_string='<{', variable_end_string='}>')
        _render = environment.get_template(helm_template_file).render(module_name=helm_name, **data_dict,
                                                                      variables=variables_list)
        with open(helm_path, "w") as fp:
            fp.write(_render)
    except Exception as e:
        logging.exception(f"Exception occurred while rendering the helm file {helm_name} - {e.args}")
def clone_repository(repo_link, module_output_path, clone_branch):
    """Clone ``repo_link`` at ``clone_branch`` into ``module_output_path``.

    For internal gitlab-pm repos the configured username/token are injected
    into the clone URL. Returns True on success, False on failure.
    """
    original_link = repo_link  # keep the credential-free URL for logging
    try:
        if repo_link.split("https://")[-1].startswith("gitlab-pm"):
            repo_link = repo_link.replace("https://", f"https://{git_user_name}:{git_access_token}@")
        git.Repo.clone_from(repo_link, module_output_path, branch=clone_branch)
        return True
    except Exception as e:
        # Log the original link — the rewritten one embeds the access token
        # and previously leaked it into the logs.
        logging.exception(f"Exception occurred while cloning the git repo - {original_link} - {e.args}")
        return False
def clone_repository_with_defined_file(repo_link: str, file_output_path, clone_branch, private_token, clone_file_path):
    """Download a single file from a GitLab project without cloning the repo.

    :param repo_link: full https project URL (must live under GIT_BASE_URL)
    :param file_output_path: local path the file is streamed into
    :param clone_branch: git ref to read the file from
    :param private_token: GitLab private token for the API client
    :param clone_file_path: path of the file inside the repository
    :return: True on success, False on any failure.
    """
    try:
        base_url = os.environ.get("GIT_BASE_URL", default="https://gitlab-pm.knowledgelens.com/")
        # str.split never returns an empty list, so the original
        # `if not repo_link.split(base_url)` guard could never fire;
        # check membership explicitly instead.
        if base_url not in repo_link:
            return False
        search_str = repo_link.split(base_url)[-1].replace(".git", "")
        gl = gitlab.Gitlab(url=base_url, private_token=private_token)
        pl = gl.projects.list(search=search_str)
        if not pl:
            return False
        project = pl[0]
        with open(file_output_path, 'wb') as f:
            project.files.raw(file_path=clone_file_path, ref=clone_branch, streamed=True, action=f.write)
        return True
    except Exception as e:
        logging.exception(f"Exception occurred while cloning the git repo - {repo_link} - {e.args}")
        return False
def convert_yaml_to_json(yaml_file_path):
    """Safe-load a YAML file and return its content ({} when the file is
    missing or unreadable)."""
    try:
        if not os.path.exists(yaml_file_path):
            return {}
        loader = ruamel.yaml.YAML(typ='safe')
        with open(yaml_file_path) as source:
            return loader.load(source)
    except Exception as err:
        logging.exception(f"Exception Occurred while reading the yaml file {err.args}")
        return {}
def push_helm_deployments(repo_link: str, private_token: str, ilens_version, release_version, client_name):
    """Create branch ``<client>_<iv>.<rv>`` on the helm repo and commit every
    rendered chart from HELM_STORE_PATH into HELM_PATH in a single commit.

    :return: True on success, False on failure (consistent with the other
             repo helpers; the original returned None on every path).
    """
    try:
        base_url = os.environ.get("GIT_BASE_URL", default="https://gitlab-pm.knowledgelens.com/")
        # str.split never returns [], so the original emptiness guard was dead.
        if base_url not in repo_link:
            return False
        gl = gitlab.Gitlab(url=base_url, private_token=private_token)
        search_str = repo_link.split(base_url)[-1].replace(".git", "")
        pl = gl.projects.list(search=search_str)
        if not pl:
            return False
        project = pl[0]
        branch_name = f"{client_name}_{ilens_version}.{release_version}"
        project.branches.create({'branch': branch_name, 'ref': project.default_branch})
        commit_actions = []
        for file in os.listdir(HELM_STORE_PATH):
            # Use a context manager — the old open().read() leaked file handles.
            with open(f'{HELM_STORE_PATH}/{file}') as chart_fp:
                content = chart_fp.read()
            commit_actions.append({
                'action': 'create',
                'file_path': f'{HELM_PATH}/{file}',
                'content': content
            })
        commit_data = {
            'branch': branch_name,
            'commit_message': f"{branch_name} helm creation",
            'actions': commit_actions
        }
        project.commits.create(commit_data)
        return True
    except Exception as e:
        logging.exception(f'Exception while pushing helm deployments: {e.args}')
        return False
ap = argparse.ArgumentParser()
db_handler = ILensVersionHandler()

if __name__ == '__main__':
    ap.add_argument(
        "--ilens_version",
        "-iv",
        required=False,
        default=None,
        help="ILens Version Tag",
    )
    ap.add_argument(
        "--release_version",
        "-rv",
        required=False,
        default=None,
        help="ILens Release Tag",
    )
    ap.add_argument(
        "--client_name",
        "-cn",
        required=False,
        default=None,
        help="Client Name Tag"
    )
    ap.add_argument(
        "--git_repos",
        "-gr",
        required=False,
        default=None,
        help="Git repos to be added in helm",
        nargs="+"
    )
    arguments = vars(ap.parse_args())
    _ilens_version = arguments["ilens_version"]
    _release_version = arguments["release_version"]
    _client_name = arguments['client_name']
    _git_repos = arguments["git_repos"]
    if not _ilens_version or not _release_version or not _client_name or not _git_repos:
        print("git_repos, client_name, ilens_version and release_version details not found!!!!!")
        sys.exit()

    with open("config.json", "r") as f:
        data = json.load(f)
    # Restrict the configured modules to the repos requested on the CLI.
    data['git_modules'] = [x for x in data['git_modules'] if x['git_link'] in _git_repos]

    for _data in data.get('git_modules'):
        # Per-module overrides fall back to the CLI-wide values.
        _ilens_version = _data.get("ilens_version", _ilens_version)
        # BUG FIX: this previously read "ilens_version" again, so any
        # per-module override clobbered the release version with the
        # ilens version.
        _release_version = _data.get("release_version", _release_version)
        git_link = _data["git_link"]
        branch = _data["branch"]
        variables_file = _data.get("variables_file") or "variables.yml"
        module_name = git_link.split("/")[-1].split(".git")[0]
        module_path = os.path.join("tmp", module_name)
        os.makedirs(module_path, exist_ok=True)
        variables_file_path = os.path.join(module_path, variables_file)
        if not clone_repository_with_defined_file(repo_link=git_link, clone_branch=branch,
                                                  file_output_path=variables_file_path,
                                                  private_token=git_access_token, clone_file_path=variables_file):
            logging.debug("Failed to clone module!! Skipping Helm File Preparation")
            continue
        _module_data = convert_yaml_to_json(variables_file_path)
        # Keep only well-formed environmentVar entries ({name, value} present).
        env_variables_from_yml = _module_data.get('deployment', {}).get('environmentVar', [])
        env_variables_from_yml = [_v for _v in env_variables_from_yml if
                                  {'name', 'value'}.issubset(_v.keys())]
        template_file = _data.get("template_file") or "helm_service_deployment.yaml"
        session_obj = get_db_for_func()
        try:
            module_info = db_handler.get_module_versions(
                input_data=GetRequest(module_name=module_name, client='iLens', ilens_version=_ilens_version,
                                      release_version=_release_version), db=session_obj)
        finally:
            # Always release the DB session, even if the lookup raises.
            session_obj.close()
        _data["image_tag"] = module_info.get("image_tag", "-")
        render_helm_chart(helm_name=module_name, data_dict=_data, variables_list=env_variables_from_yml,
                          helm_template_file=template_file)

    push_helm_deployments(helm_repo, git_access_token, _ilens_version, _release_version, _client_name)
    # ignore_errors: these directories may not exist if every module failed
    # to clone / render; cleanup must not crash the run at the end.
    shutil.rmtree("./tmp", ignore_errors=True)
    shutil.rmtree(HELM_STORE_PATH, ignore_errors=True)
\ No newline at end of file
This diff is collapsed.
from dataclasses import dataclass, field
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
import __version__
from scripts.db.psql.create_default_tables import create_default_psql_dependencies
from scripts.services import router
@dataclass
class FastAPIConfig:
    """Keyword arguments for FastAPI(); unpacked via FastAPIConfig().__dict__."""
    title: str = "ILens Version Management"
    description: str = "iLens Module Management and version Upgrades"
    version: str = __version__.version
    # None disables the /redoc endpoint. NOTE(review): the annotation says
    # `str` but the default is None — consider Optional[str].
    redoc_url: str = field(default=None)
# Build the application from the dataclass config and mount all routes.
app = FastAPI(**FastAPIConfig().__dict__)
app.include_router(router)
# CORS: any origin, credentials allowed, explicit method list, all headers.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["GET", "POST", "DELETE", "PUT"],
    allow_headers=["*"],
)
@app.on_event("startup")
async def startup_event():
    """Create the default database and tables once at application startup."""
    create_default_psql_dependencies()
GitPython==3.1.27
gitdb==4.0.9
python-dotenv~=0.21.0
jinja2~=3.1.2
fastapi~=0.73.0
sqlalchemy~=1.3.24
python-gitlab~=3.9.0
ruamel.yaml~=0.17.21
uvicorn
sqlalchemy-utils
pendulum
PyYAML
\ No newline at end of file
import os
import sys
from configparser import BasicInterpolation, ConfigParser
class EnvInterpolation(BasicInterpolation):
    """
    Interpolation which expands environment variables in values.

    A value such as ``$DB_URI`` is replaced with the environment variable's
    content; if the variable is unset (the value still starts with ``$``
    after expansion) ``None`` is returned so callers can detect the gap.
    """

    def before_get(self, parser, section, option, value, defaults):
        value = super().before_get(parser, section, option, value, defaults)
        # Expand once and reuse — the original called os.path.expandvars twice.
        expanded = os.path.expandvars(value)
        if expanded.startswith('$'):
            # Unresolved reference: surface it as None rather than "$NAME".
            return None
        return expanded
# Load the INI config once at import; any failure is fatal for the service.
try:
    config = ConfigParser(interpolation=EnvInterpolation())
    # Plain string — the original was an f-string with no placeholders.
    config.read("conf/application.conf")
except Exception as e:
    print(f"Error while loading the config: {e}")
    print("Failed to Load Configuration. Exiting!!!")
    sys.stdout.flush()
    sys.exit()
class Service:
    # Uvicorn bind settings from [SERVICE]; port stays a string here and is
    # int()-converted by the launcher script.
    port = config["SERVICE"]["port"]
    host = config["SERVICE"]["host"]
class DBConf:
    # [SQL_DB] uri resolves $DB_URI; EnvInterpolation returns None when the
    # env var is unset, which aborts the process at import time.
    DB_URI = config.get('SQL_DB', 'uri')
    if not DB_URI:
        print("Error, environment variable DB_URI not set")
        sys.exit(1)
class Logging:
    # [LOGGING] level resolves $LOG_LEVEL; falls back to DEBUG when the key is
    # missing, then to INFO when interpolation produced None/empty.
    level = config.get("LOGGING", "level", fallback="DEBUG")
    level = level if level else "INFO"
    # Whether log handlers should include tracebacks (default True).
    tb_flag = config.getboolean("LOGGING", "traceback", fallback=True)
    tb_flag = tb_flag if tb_flag is not None else True
from copy import deepcopy
import pendulum
from sqlalchemy.orm import Session
from scripts.db.psql.ilens_version_table import ILensVersionMaster
from scripts.logging import logger
from scripts.schemas import InsertRequest, GetRequest, TableInsertSchema
class ILensVersionHandler:
    """Facade over ILensVersionMaster for module version bookkeeping."""

    def __init__(self):
        ...

    @staticmethod
    def update_module_versions(input_data: InsertRequest, db: Session):
        """Bump one component of a module's version and persist a new record.

        insert_type selects which counter increments: "ilens_version",
        "release" (release_version), "feature", or anything else (patch).
        Returns the newly inserted ORM row. Re-raises after logging.
        """
        ilens_version_db = ILensVersionMaster(db=db)
        try:
            curr_time = pendulum.now()
            # Normalise the registry prefix so the tag never gets a double slash.
            if input_data.prefix_image.endswith("/"):
                input_data.prefix_image = input_data.prefix_image[:-1]
            # Baseline record used when no prior row exists for module/client.
            db_json = dict(module_name=input_data.module_name, client=input_data.client,
                           ilens_version=input_data.ilens_version,
                           release_version=input_data.release_version,
                           feature_version=0, patch_version=0)
            if (existing_data := ilens_version_db.read_data_from_db(input_data=GetRequest(**input_data.dict()),
                                                                    db=db)):
                if input_data.insert_type == "ilens_version":
                    existing_data["ilens_version"] += 1
                    # NOTE(review): release_version is NOT reset on an ilens
                    # bump while feature/patch are — confirm this is intended.
                    existing_data.update(feature_version=0, patch_version=0)
                elif input_data.insert_type == "release":
                    existing_data["release_version"] += 1
                    existing_data.update(feature_version=0, patch_version=0)
                elif input_data.insert_type == "feature":
                    existing_data["feature_version"] += 1
                else:
                    # Any other insert_type is treated as a patch bump.
                    existing_data["patch_version"] += 1
            if not existing_data:
                existing_data = deepcopy(db_json)
            # Tag shape: <prefix>/v<iv>.<rv>:<module>-v<iv>.<rv>.<fv>.<pv>
            concat_string = f'v{existing_data["ilens_version"]}.{existing_data["release_version"]}'
            existing_data["image_tag"] = f'{input_data.prefix_image}/' \
                                         f'{concat_string}:{existing_data["module_name"]}-{concat_string}.{existing_data["feature_version"]}.{existing_data["patch_version"]}'
            # Drop the stale timestamp; a fresh one is supplied below.
            existing_data.pop("last_updated_at", None)
            return ilens_version_db.add_module_version_record(
                insert_record=TableInsertSchema(**existing_data, last_updated_at=curr_time))
        except Exception as e:
            logger.exception(f'Exception Occurred while updating the record in db {e.args}')
            raise

    @staticmethod
    def get_module_versions(input_data: GetRequest, db: Session):
        """Return the latest matching version row as a dict, or {} if none."""
        ilens_version_db = ILensVersionMaster(db=db)
        try:
            if existing_data := ilens_version_db.read_data_from_db(input_data=GetRequest(**input_data.dict()),
                                                                   db=db):
                return existing_data
        except Exception as e:
            logger.exception(f'Exception Occurred while getting the record in db {e.args}')
            raise
        return {}
import sys
from sqlalchemy import create_engine
from sqlalchemy_utils import database_exists, create_database
from scripts.config import DBConf
from scripts.db.psql.models import ILensVersionTable
from scripts.logging import logger
# Engine created at import time; DBConf.DB_URI is already validated non-empty.
engine = create_engine(DBConf.DB_URI)


def create_default_psql_dependencies():
    """Create the database (if missing) and the ilens_version_tags table.

    Called from the FastAPI startup hook; exits the process on failure since
    the service cannot run without its table.
    """
    try:
        if not database_exists(engine.url):
            create_database(engine.url)
        # checkfirst makes table creation idempotent across restarts.
        ILensVersionTable.__table__.create(bind=engine, checkfirst=True)
    except Exception as e:
        logger.error(f"Error occurred while creating: {e}", exc_info=True)
        sys.exit()
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
# from scripts.constants.app_constants import TableNames
from scripts.config import DBConf
# table_name = "UserDataEntryTable"
# Module-level engine/session factory shared by the dependencies below.
engine = create_engine(DBConf.DB_URI)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
Base = declarative_base()


# Dependency
def get_db():
    """FastAPI dependency: yield a session and guarantee it is closed."""
    session = SessionLocal()
    try:
        yield session
    finally:
        session.close()
def get_db_for_func():
    # Plain (non-dependency) session for scripts; the caller must close it.
    return SessionLocal()
from fastapi.encoders import jsonable_encoder
from sqlalchemy import select, func, desc
from sqlalchemy.orm import Session
from scripts.db.psql.models import ILensVersionTable
from scripts.logging import logger
from scripts.schemas import TableInsertSchema, GetRequest
from scripts.utils.postgres_util import SQLDBUtils
class ILensVersionMaster(SQLDBUtils):
    """DB accessor for the ilens_version_tags table."""

    def __init__(self, db: Session):
        super().__init__(db)
        self.table = ILensVersionTable

    def add_module_version_record(self, insert_record: TableInsertSchema):
        """Insert one version row, commit, and return the refreshed ORM object."""
        try:
            version_db = self.table(**insert_record.dict())
            self.session.add(version_db)
            self.session.commit()
            self.session.refresh(version_db)
            return version_db
        except Exception as e:
            logger.error(f"Could not insert a new record: {e}")
            raise

    def read_data_from_db(self, input_data: GetRequest, db: Session):
        """Return the highest-version row for module/client (optionally pinned
        to ilens/release version) as a JSON-able dict, or None when absent."""
        try:
            # NOTE(review): a Query object is always truthy, so this walrus
            # condition never takes the else branch; "no match" is actually
            # expressed by .first() returning None -> jsonable_encoder(None).
            if data := (db.query(self.table).order_by(desc(self.table.ilens_version), desc(self.table.release_version),
                                                      desc(self.table.feature_version),
                                                      desc(self.table.patch_version)).filter(
                    self.table.module_name == input_data.module_name, self.table.client == input_data.client)):
                if input_data.ilens_version:
                    data = data.filter(self.table.ilens_version == input_data.ilens_version)
                if input_data.release_version:
                    data = data.filter(self.table.release_version == input_data.release_version)
                data = data.first()
                return jsonable_encoder(data)
            else:
                return None
        except Exception as e:
            logger.exception(e)
            raise

    def update_data_in_db(self, input_data: GetRequest, db: Session):
        """Fetch the first row for module/client.

        NOTE(review): despite its name, this performs no UPDATE — it only
        reads and encodes the row. Confirm whether mutation was intended.
        """
        try:
            if data := (db.query(self.table).filter(self.table.module_name == input_data.module_name,
                                                    self.table.client == input_data.client).first()):
                return jsonable_encoder(data)
            else:
                return None
        except Exception as e:
            logger.exception(e)
            raise

    def get_data_by_module_name_query(self, input_data: GetRequest, db: Session, limit_value: int = 1):
        """Build (but do not execute) a SELECT adding a concatenated
        "module_tag" column (iv.rv.fv.pv), ordered by last_updated_at."""
        try:
            return select(*self.table.__table__.columns,
                          func.concat(self.table.ilens_version, ".", self.table.release_version, ".",
                                      self.table.feature_version, ".", self.table.patch_version).label(
                              "module_tag")).order_by(self.table.last_updated_at).filter(
                self.table.module_name == input_data.module_name, self.table.client == input_data.client).limit(
                limit_value)
        except Exception as e:
            logger.exception(e)
            raise
from sqlalchemy import Column, Integer, String, Index, TIMESTAMP
from scripts.db.psql.databases import Base
class ILensVersionTable(Base):
    """One row per version state of a (module, client): four integer version
    components (ilens.release.feature.patch) plus the rendered image tag."""
    __tablename__ = "ilens_version_tags"

    id = Column(Integer, autoincrement=True, primary_key=True)
    module_name = Column(String, index=True, nullable=False)
    client = Column(String, index=True, nullable=False)
    ilens_version = Column(Integer, nullable=False)
    release_version = Column(Integer, nullable=False)
    feature_version = Column(Integer, nullable=False)
    patch_version = Column(Integer, nullable=False)
    image_tag = Column(String, nullable=False)
    # Timezone-aware timestamp, indexed for latest-first ordering.
    last_updated_at = Column(TIMESTAMP(timezone=True), nullable=False, index=True)
    # Composite index covering module+client lookups.
    __table_args__ = (Index('version_index', "module_name", "client"),)
import logging
import os
from logging import StreamHandler
from logging.handlers import RotatingFileHandler, SocketHandler
import yaml
from scripts.config import Logging
# this method is to read the configuration from backup.conf
def read_configuration(file_name):
    """Load and parse a YAML configuration file.

    :param file_name: path to the YAML file
    :return: parsed mapping, or None when parsing/reading fails (a message
             is printed in that case)
    """
    with open(file_name, 'r') as config_stream:
        try:
            parsed = yaml.safe_load(config_stream)
        except Exception as parse_error:
            print(f"Failed to load Configuration. Error: {parse_error}")
            return None
        return parsed
# Loaded once at import; the "logger" section drives handler construction.
config = read_configuration("scripts/logging/logger_conf.yml")
logging_config = config["logger"]
# The effective level comes from application config, not the YAML file.
logging_config["level"] = Logging.level
enable_traceback: bool = Logging.tb_flag
def get_logger():
    """
    Creates a rotating log.

    Builds the root logger from the handler specs in logger_conf.yml.
    Supported types: RotatingFileHandler (logs/<name>.log), SocketHandler,
    StreamHandler. Unknown types are skipped.
    """
    __logger__ = logging.getLogger('')
    __logger__.setLevel(logging_config["level"].upper())
    log_formatter = '%(asctime)s - %(levelname)-6s - [%(threadName)5s:%(funcName)5s():' \
                    '%(lineno)s] - %(message)s'
    time_format = "%Y-%m-%d %H:%M:%S"
    formatter = logging.Formatter(log_formatter, time_format)
    for each_handler in logging_config["handlers"]:
        if each_handler["type"] in ["RotatingFileHandler"]:
            # exist_ok replaces the exists()/makedirs check-then-create pair.
            os.makedirs("logs", exist_ok=True)
            log_file = os.path.join("logs", f"{logging_config['name']}.log")
            temp_handler = RotatingFileHandler(log_file,
                                               maxBytes=each_handler["max_bytes"],
                                               backupCount=each_handler["back_up_count"])
            temp_handler.setFormatter(formatter)
        elif each_handler["type"] in ["SocketHandler"]:
            temp_handler = SocketHandler(each_handler["host"], each_handler["port"])
        elif each_handler["type"] in ["StreamHandler"]:
            temp_handler = StreamHandler()
            temp_handler.setFormatter(formatter)
        else:
            # Unknown handler type: skip instead of registering None, which
            # previously crashed the logger on the first emit.
            continue
        __logger__.addHandler(temp_handler)
    return __logger__


logger = get_logger()
logger:
name: helm-automation-script
level: DEBUG
handlers:
- type: RotatingFileHandler
file_path: logs/
max_bytes: 100000000
back_up_count: 5
- type: StreamHandler
name: helm-automation-script
import logging
import os
from logging import StreamHandler
from logging.handlers import RotatingFileHandler, SocketHandler
import yaml
from scripts.config import Logging
# this method is to read the configuration from backup.conf
def read_configuration(file_name):
    """Load and parse a YAML configuration file.

    :param file_name: path to the YAML file
    :return: parsed mapping, or None when parsing/reading fails (a message
             is printed in that case)
    """
    with open(file_name, "r") as config_stream:
        try:
            parsed = yaml.safe_load(config_stream)
        except Exception as parse_error:
            print(f"Failed to load Configuration. Error: {parse_error}")
            return None
        return parsed
# Loaded once at import; the "logger" section drives handler construction.
config = read_configuration("scripts/logging/logger_conf.yml")
logging_config = config["logger"]
# The effective level comes from application config, not the YAML file.
logging_config["level"] = Logging.level
enable_traceback: bool = Logging.tb_flag
def get_logger():
    """
    Creates a rotating log.

    Builds the root logger from the handler specs in logger_conf.yml.
    Supported types: RotatingFileHandler (<file_path><name>.log),
    SocketHandler, StreamHandler. Unknown types are skipped.
    """
    __logger__ = logging.getLogger("")
    __logger__.setLevel(logging_config["level"].upper())
    log_formatter = (
        "%(asctime)s - %(levelname)-6s - [%(threadName)5s:%(funcName)5s():"
        ""
        "%(lineno)s] - %(message)s"
    )
    time_format = "%Y-%m-%d %H:%M:%S"
    formatter = logging.Formatter(log_formatter, time_format)
    for each_handler in logging_config["handlers"]:
        if each_handler["type"] in ["RotatingFileHandler"]:
            # Ensure the directory exists before the handler opens the file;
            # exist_ok replaces the exists()/makedirs check-then-create pair.
            os.makedirs(each_handler["file_path"], exist_ok=True)
            log_file = os.path.join(
                each_handler["file_path"] + logging_config["name"] + ".log"
            )
            temp_handler = RotatingFileHandler(
                log_file,
                maxBytes=each_handler["max_bytes"],
                backupCount=each_handler["back_up_count"],
            )
            temp_handler.setFormatter(formatter)
        elif each_handler["type"] in ["SocketHandler"]:
            temp_handler = SocketHandler(each_handler["host"], each_handler["port"])
        elif each_handler["type"] in ["StreamHandler"]:
            temp_handler = StreamHandler()
            temp_handler.setFormatter(formatter)
        else:
            # Unknown handler type: skip instead of registering None, which
            # previously crashed the logger on the first emit.
            continue
        __logger__.addHandler(temp_handler)
    return __logger__


logger = get_logger()
from datetime import datetime
from typing import Optional
from pydantic import BaseModel
class TableInsertSchema(BaseModel):
    """Full row payload for ilens_version_tags inserts (ORM-compatible)."""
    module_name: str
    client: str
    ilens_version: int
    release_version: int
    feature_version: int
    patch_version: int
    image_tag: str
    last_updated_at: datetime

    class Config:
        # Allows building the schema directly from SQLAlchemy ORM rows.
        orm_mode = True
class InsertRequest(BaseModel):
    """Version-bump request; insert_type picks which counter increments."""
    module_name: str
    client: Optional[str] = 'iLens'
    # One of "ilens_version", "release", "feature"; any other value bumps patch.
    insert_type: Optional[str] = "release"
    ilens_version: Optional[int] = 6
    release_version: Optional[int] = 0
    # Registry prefix; a trailing "/" is stripped by the handler before use.
    prefix_image: Optional[str] = "azrilensprod.azurecr.io/ilens/release/versions/"
class GetRequest(BaseModel):
    """Lookup filter; the version fields are optional narrowing criteria."""
    module_name: str
    client: Optional[str] = 'iLens'
    ilens_version: Optional[int]
    release_version: Optional[int]
from fastapi import Depends, APIRouter
from sqlalchemy.orm import Session
from scripts.core import ILensVersionHandler
from scripts.db.psql.databases import get_db
from scripts.logging import logger
from scripts.schemas import InsertRequest, GetRequest
router = APIRouter()


@router.post("/module/increment_version")
def increment_version_release(input_data: InsertRequest, db: Session = Depends(get_db)):
    """Bump the requested version component and return the stored row
    ({} on any error, which is logged)."""
    try:
        db_handler = ILensVersionHandler()
        return db_handler.update_module_versions(input_data, db)
    except Exception as e:
        logger.exception(f"Exception occurred while updating the tag version {e.args}")
        return {}
@router.get("/module/increment_version")
def get_increment_version_release(input_data: GetRequest = Depends(), db: Session = Depends(get_db)):
    """Return the latest matching version record ({} when absent or on error)."""
    try:
        db_handler = ILensVersionHandler()
        return db_handler.get_module_versions(input_data, db)
    except Exception as e:
        # Fixed copy-pasted log text: this is the read path, not an update.
        logger.exception(f"Exception occurred while fetching the tag version {e.args}")
        return {}
import importlib
import logging
from fastapi.encoders import jsonable_encoder
from sqlalchemy import Text, create_engine
from sqlalchemy.orm import Session
from scripts.config import DBConf
class SQLDBUtils:
    """Generic SQLAlchemy session helpers: filter-spec driven CRUD plus raw
    query execution. Filter specs are dicts of the shape
    {"column": <ORM column>, "expression": <op name>, "value": <operand>}.
    """

    def __init__(self, db: Session):
        self.session: Session = db

    def close(self):
        logging.debug("SQL Session closed")
        self.session.close()

    # Keys used inside a filter-spec dict.
    @property
    def key_filter_expression(self):
        return "expression"

    @property
    def key_filter_column(self):
        return "column"

    @property
    def key_filter_value(self):
        return "value"

    def add_data(self, table):
        # Add one ORM instance, commit, and refresh it from the DB.
        self.session.add(table)
        self.session.commit()
        self.session.refresh(table)
        return True

    def bulk_insert(self, object_models):
        # Bulk-save a list of ORM instances in one commit.
        self.session.bulk_save_objects(object_models)
        self.session.commit()
        return True

    def filter_expression(self):
        # Operator name from the current filter spec; defaults to "eq".
        filter_expression = self.filter.get(self.key_filter_expression, "eq")
        logging.debug(f"Filter expression: {filter_expression}")
        return filter_expression

    def filter_column(self):
        column = self.filter.get(self.key_filter_column, None)
        logging.debug(f"Filter column: {column}")
        return column

    def filter_value(self):
        filter_value = self.filter.get(self.key_filter_value, None)
        logging.debug(f"Filter value: {filter_value}")
        return filter_value

    def _filter(self, session_query, filters=None):
        """Apply each filter spec in `filters` to `session_query`; specs
        without a column are skipped. The current spec is stored on
        self.filter for the accessor methods above (not thread-safe)."""
        if filters is not None:
            for _filter in filters:
                self.filter = _filter
                if self.filter_column() is None:
                    continue
                session_query = self.get_session_query(session_query=session_query)
        return session_query

    def get_session_query(self, session_query):
        """Translate the current filter spec into a .filter() call.

        NOTE(review): "le"/"ge" apply strict </> (the inclusive forms are
        "lte"/"gte"), and "none"/"is_none" look inverted ("none" -> IS NULL,
        "is_none" -> IS NOT NULL) — confirm against callers before changing.
        """
        try:
            if self.filter_expression() == "eq":
                session_query = session_query.filter(
                    self.filter_column() == self.filter_value()
                )
            if self.filter_expression() == "le":
                session_query = session_query.filter(
                    self.filter_column() < self.filter_value()
                )
            if self.filter_expression() == "ge":
                session_query = session_query.filter(
                    self.filter_column() > self.filter_value()
                )
            if self.filter_expression() == "lte":
                session_query = session_query.filter(
                    self.filter_column() <= self.filter_value()
                )
            if self.filter_expression() == "gte":
                session_query = session_query.filter(
                    self.filter_column() >= self.filter_value()
                )
            if self.filter_expression() == "neq":
                session_query = session_query.filter(
                    self.filter_column() != self.filter_value()
                )
            if self.filter_expression() == "none":
                session_query = session_query.filter(
                    self.filter_column().is_(None)
                )
            if self.filter_expression() == "is_none":
                session_query = session_query.filter(
                    self.filter_column().is_not(None)
                )
        except Exception as e:
            logging.error(f"Error occurred while filtering the session query {e}")
        return session_query

    def insert_one(self, table, insert_json):
        """Merge (upsert by primary key) a single row built from insert_json."""
        try:
            row = table()
            for k in insert_json:
                setattr(row, k, insert_json[k])
            self.session.merge(row)
            self.session.commit()
            return True
        except Exception as e:
            logging.error(f"Error while inserting the record {e}")
            raise

    def update(self, table, update_json, filters=None, insert=False, insert_id=None, update_one=False):
        """Update rows matching `filters` with `update_json`.

        When no row matches and insert=True, a new row is inserted from
        update_json merged with insert_id. Falsy values in update_json are
        skipped. NOTE(review): with update_one=False, setattr is applied to
        the Query object (a no-op) before .update() runs — confirm intent.
        """
        try:
            logging.debug(filters)
            row = self.session.query(table)
            filtered_row = self._filter(session_query=row, filters=filters)
            if update_one:
                filtered_row = filtered_row.first()
            if filtered_row is None:
                logging.debug("There are no rows meeting the given update criteria.")
                if insert:
                    logging.debug("Trying to insert a new record")
                    if insert_id is None:
                        logging.warning(
                            "ID not provided to insert record. Skipping insert."
                        )
                        return False
                    else:
                        update_json.update(insert_id)
                        if self.insert_one(table=table, insert_json=update_json):
                            return True
                        else:
                            return False
                else:
                    return False
            else:
                logging.debug("Record available to update")
                for k in update_json:
                    if not update_json[k]:
                        continue
                    setattr(filtered_row, k, update_json[k])
                if not update_one:
                    filtered_row.update(values=update_json)
                self.session.commit()
        except Exception as e:
            logging.error(f"Error while updating the record {e}")
            raise

    def delete(self, table, filters=None):
        """Delete all rows matching `filters`; returns True after commit."""
        try:
            # logging.trace(filters)
            row = self.session.query(table)
            filtered_row = self._filter(session_query=row, filters=filters)
            if filtered_row is None:
                logging.debug("There were no records to be deleted")
            else:
                filtered_row.delete()
                self.session.commit()
            return True
        except Exception as e:
            logging.error(f"Failed to delete a record {e}")
            raise

    def distinct_values_by_column(self, table, column, filters=None):
        # Distinct values of one column, optionally filtered.
        query = self.session.query(getattr(table, column).distinct().label(column))
        query = self._filter(session_query=query, filters=filters)
        distinct_values = [getattr(row, column) for row in query.all()]
        return distinct_values

    def select_from_table(self, table=None, query=None, find_one=False):
        """Run `query` (or SELECT * FROM table) and return rows as dicts.

        NOTE(review): the print loop below consumes the result cursor, so the
        comprehension after it sees no rows and the function returns [] even
        when rows exist — looks like leftover debugging code; confirm and
        remove the loop.
        """
        if query is None:
            query = f"select * from {table}"
        result = self.session.execute(query)
        for i in result:
            print(i)
        response = [dict(zip(row.keys(), row.values())) for row in result]
        if find_one and response:
            return response[0]
        return response

    def fetch_from_table(self, table, filter_text, limit_value, skip_value):
        """Paged fetch with a free-text filter clause (limit/offset)."""
        logging.debug(filter_text)
        row = (
            self.session.query(table)
            .filter(Text(filter_text))
            .limit(limit_value)
            .offset(skip_value)
        )
        result = self.session.execute(row)
        return [dict(zip(row.keys(), row.values())) for row in result]

    def execute_query(self, session, table=None, query=None):
        """Execute `query` (or SELECT * FROM table), close the session and
        return the raw rows; errors are logged and yield None."""
        try:
            if query is None:
                query = f"select * from {table}"
            result = self.session.execute(query)
            # output = [dict(zip(row.keys(), row.values())) for row in result]
            output = [x for x in result]
            self.session.close()
            return output
        except Exception as e:
            logging.error(f"Error occurred during execute_query: {e}")

    def fetch_query(self, query):
        """Execute `query`; returns a single JSON-encoded row when exactly one
        matched, otherwise the list. Errors are logged and yield None."""
        try:
            result = self.session.execute(query)
            output = [jsonable_encoder(x) for x in result]
            self.session.close()
            return output[0] if len(output) == 1 else output
        except Exception as e:
            logging.error(f"Error occurred during execute_query: {e}")
def create_table(table_name):
    """Create the ORM table named `table_name` from scripts.db.db_models if it
    does not already exist. Always returns True (errors are only logged)."""
    try:
        engine = create_engine(DBConf.DB_URI, echo=True)
        # NOTE(review): has_table(engine, name) is the legacy signature,
        # deprecated in newer SQLAlchemy — confirm the pinned version.
        if not engine.dialect.has_table(engine, table_name):
            table_models = importlib.import_module('scripts.db.db_models')
            ORMTable = getattr(table_models, table_name)
            ORMTable.__table__.create(bind=engine, checkfirst=True)
    except Exception as e:
        logging.error(f"Error occurred during start-up: {e}", exc_info=True)
    return True
name: <{ module_name }>
codeType: backend
type: core
affinity:
enabled: True
values: [ "klblrserv24","klblrserv23" ]
podAutoScaler:
enabled: False
type: "hpa"
maxReplicaCount: 3
minReplicaCount: 1
scalePercentage: 85
deployment:
imageName: <{ image_tag }>
command: <{ command }>
PullSecrets: <{ PullSecrets | default("ilens-azregistry") }>
PullPolicy: IfNotPresent <{ branch }>
resources:
requests:
memory: <{ request_memory | default("250Mi") }>
cpu: <{ request_cpu | default("250Mi") }>
limits:
memory: <{ limit_memory | default("750Mi") }>
cpu: <{ limit_cpu | default("500Mi") }>
environmentVar:
- name: MONGO_URI
valueFrom:
secretKeyRef:
name: mongo-uri
key: MONGO_URI
{% for i in variables %}
{% if 'name' in i and 'value' in i %}
    - name: '<{ i.name }>'
      value: '<{ i.value }>'
{% endif %}
{% endfor %}
mountVolume:
enabled: <{ enableMountVolume }>
volumeMounts:
- name: <{ mount_name | default("core-volumes") }>
mountPath: "/code/data"
volumes:
- name: <{ mount_name | default("core-volumes") }>
persistentVolumeClaim:
claimName: <{ claim_name | default("core-volumes") }>
\ No newline at end of file
name: <{ module_name }>
codeType: backend
type: core
ports:
name: port<{ module_port }>
port: <{ module_port }>
targetPort: <{ module_port }>
nodePort: <{ node_port }>
affinity:
enabled: True
values: [ "klblrserv24","klblrserv23" ]
podAutoScaler:
enabled: False
type: "hpa"
maxReplicaCount: 3
minReplicaCount: 1
scalePercentage: 85
deployment:
imageName: <{ image_tag }>
command: <{ command }>
PullSecrets: <{ PullSecrets | default("ilens-azregistry") }>
PullPolicy: IfNotPresent <{ branch }>
resources:
requests:
memory: <{ request_memory | default("250Mi") }>
cpu: <{ request_cpu | default("250Mi") }>
limits:
memory: <{ limit_memory | default("750Mi") }>
cpu: <{ limit_cpu | default("500Mi") }>
livenessProbe:
enabled: <{ enable_liveness | default(False)}>
path: <{ liveness_api }>
initialDelaySeconds: <{ liveness_delay_seconds | default(20) }>
timeoutSeconds: <{ liveness_timeout_seconds | default(5) }>
periodSeconds: <{ liveness_period_seconds | default(10) }>
failureThreshold: <{ liveness_failure_threshold | default(3) }>
successThreshold: <{ liveness_success_threshold | default(1) }>
environmentVar:
- name: MONGO_URI
valueFrom:
secretKeyRef:
name: mongo-uri
key: MONGO_URI
{% for i in variables %}
{% if 'name' in i and 'value' in i %}
    - name: '<{ i.name }>'
      value: '<{ i.value }>'
{% endif %}
{% endfor %}
mountVolume:
enabled: <{ enableMountVolume }>
volumeMounts:
- name: <{ mount_name | default("core-volumes") }>
mountPath: "/code/data"
volumes:
- name: <{ mount_name | default("core-volumes") }>
persistentVolumeClaim:
claimName: <{ claim_name | default("core-volumes") }>
service:
name: <{ module_name }>
type: NodePort
\ No newline at end of file
import jinja2

# Quick manual check of the <{ }>-delimited template rendering used by the
# helm automation script.
in_file = 'data.yaml'
out_file = 'output.yaml'

jinja_env = jinja2.Environment(
    loader=jinja2.FileSystemLoader(searchpath='.'),
    trim_blocks=True,
    variable_start_string='<{',
    variable_end_string='}>',
)
sample_variables = [
    {"name": "harsha", "value": "hi"},
    {"name": "vardhan", "value": "hello"},
]
rendered = jinja_env.get_template(in_file).render(variables=sample_variables)
print(rendered)
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment