Compare commits

..

No commits in common. "f8b0ba54ba5a2f508fe09c52672f95f74b279161" and "6ce3e4accec8f53cf1a86a437849d63ca4d2f9bf" have entirely different histories.

424 changed files with 7014 additions and 34340 deletions

126
.gitignore vendored
View File

@ -1,26 +1,108 @@
# Logs # Created by .ignore support plugin (hsz.mobi)
logs ### Python template
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/
.pytest_cache/
# Translations
*.mo
*.pot
# Django stuff:
*.log
npm-debug.log* local_settings.py
yarn-debug.log* db.sqlite3
yarn-error.log*
pnpm-debug.log*
lerna-debug.log*
node_modules # Flask stuff:
dist instance/
dist-ssr .webassets-cache
*.local
# Editor directories and files # Scrapy stuff:
.vscode/* .scrapy
!.vscode/extensions.json
.idea
.DS_Store
*.suo
*.ntvs*
*.njsproj
*.sln
*.sw?
.env # Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# pyenv
.python-version
# celery beat schedule file
celerybeat-schedule
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.idea/*

View File

@ -1,12 +0,0 @@
# React + Vite
This template provides a minimal setup to get React working in Vite with HMR and some ESLint rules.
Currently, two official plugins are available:
- [@vitejs/plugin-react](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react) uses [Babel](https://babeljs.io/) for Fast Refresh
- [@vitejs/plugin-react-swc](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react-swc) uses [SWC](https://swc.rs/) for Fast Refresh
## Expanding the ESLint configuration
If you are developing a production application, we recommend using TypeScript with type-aware lint rules enabled. Check out the [TS template](https://github.com/vitejs/vite/tree/main/packages/create-vite/template-react-ts) for information on how to integrate TypeScript and [`typescript-eslint`](https://typescript-eslint.io) in your project.

119
alembic.ini Normal file
View File

@ -0,0 +1,119 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
# Use forward slashes (/) also on windows to provide an os agnostic path
script_location = migrations
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python>=3.9 or backports.zoneinfo library and tzdata library.
# Any required deps can be installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =
# max length of characters to apply to the "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to migrations/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions
# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
# version_path_separator = newline
#
# Use os.pathsep. Default configuration used for new projects.
version_path_separator = os
# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
# NOTE(review): database credentials are hardcoded and committed here; move the
# URL/password out of version control (env var or an override in migrations/env.py).
sqlalchemy.url = postgresql://postgres:/*7984@localhost:5432/ai-appointment
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME
# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = check --fix REVISION_SCRIPT_FILENAME
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARNING
handlers = console
qualname =
[logger_sqlalchemy]
level = WARNING
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

49
apis/__init__.py Normal file
View File

@ -0,0 +1,49 @@
from fastapi import APIRouter, Depends
from middleware.auth_dependency import auth_required
from middleware.auth_secret import verify_secret
from fastapi.security import HTTPBearer
# Import the security scheme
# Declared so the Bearer scheme appears in the generated OpenAPI docs.
bearer_scheme = HTTPBearer(scheme_name="Bearer Authentication")
from .endpoints import clinics, doctors, calender, appointments, patients, admin, auth, s3, users, clinicDoctor, dashboard, call_transcripts, notifications,sns, stripe, agent
# Top-level API router: the main app mounts this once; every feature router
# hangs off it with its own prefix/tags. Routers registered with
# dependencies=[Depends(auth_required)] require an authenticated user;
# the others are public here (they may enforce auth per-route).
api_router = APIRouter()
api_router.include_router(clinics.router, prefix="/clinics", tags=["clinics"], dependencies=[Depends(auth_required)])
api_router.include_router(doctors.router, prefix="/doctors", tags=["doctors"])
api_router.include_router(calender.router, prefix="/calender", tags=["calender"])
api_router.include_router(appointments.router, prefix="/appointments", tags=["appointments"])
api_router.include_router(patients.router, prefix="/patients", tags=["patients"])
# SNS webhook is hidden from the OpenAPI schema.
api_router.include_router(sns.router, prefix="/sns", tags=["sns"], include_in_schema=False)
api_router.include_router(stripe.router, prefix="/stripe", tags=["stripe"])
api_router.include_router(
    admin.router,
    prefix="/admin",
    dependencies=[Depends(auth_required)],
    tags=["admin"])
api_router.include_router(auth.router, prefix="/auth", tags=["auth"])
api_router.include_router(s3.router, prefix="/s3", tags=["s3"])
api_router.include_router(users.router, prefix="/users", tags=["users"], dependencies=[Depends(auth_required)])
api_router.include_router(clinicDoctor.router, prefix="/clinic-doctors", tags=["clinic-doctors"], dependencies=[Depends(auth_required)])
api_router.include_router(dashboard.router, prefix="/dashboard", tags=["dashboard"], dependencies=[Depends(auth_required)])
api_router.include_router(call_transcripts.router, prefix="/call-transcripts", tags=["call-transcripts"])
api_router.include_router(notifications.router, prefix="/notifications", tags=["notifications"], dependencies=[Depends(auth_required)])
# agent (bot) routes
# Guarded by a shared secret instead of user auth, and hidden from the schema.
api_router.include_router(agent.router, prefix="/agent", tags=["agent"], dependencies=[Depends(verify_secret)], include_in_schema=False)

View File

115
apis/endpoints/admin.py Normal file
View File

@ -0,0 +1,115 @@
from fastapi import APIRouter, Request
from services.clinicServices import ClinicServices
from schemas.UpdateSchemas import ClinicStatusUpdate
from schemas.ApiResponse import ApiResponse
from schemas.BaseSchemas import MasterAppointmentTypeBase, ClinicOffersBase
from services.authService import AuthService
from services.masterAppointmentServices import MasterAppointmentServices
from schemas.CreateSchemas import CreateSuperAdmin, UpdateSuperAdmin
from utils.constants import DEFAULT_LIMIT, DEFAULT_PAGE
router = APIRouter()


@router.put("/clinic/status")
async def update_clinic_status(req:Request, data: ClinicStatusUpdate):
    """Update a clinic's status/document-status, with an optional rejection reason.

    The acting user (req.state.user) is passed to the service along with the payload.
    """
    await ClinicServices().update_clinic_status(req.state.user, data.clinic_id, data.status, data.documentStatus, data.rejection_reason)
    return ApiResponse(data="OK", message="Clinic status updated successfully")


@router.post("/user")
async def create_user(req:Request, user_data: CreateSuperAdmin):
    """Create a super-admin user; the acting user is forwarded to the service."""
    await AuthService().create_super_admin(req.state.user, user_data)
    return ApiResponse(data="OK", message="User created successfully")


@router.put("/user/{user_id}")
async def update_user(req:Request, user_id: int, user_data: UpdateSuperAdmin):
    """Update an existing super-admin user by id."""
    await AuthService().update_super_admin(req.state.user, user_id, user_data)
    return ApiResponse(data="OK", message="User updated successfully")


@router.delete("/user/{user_id}")
async def delete_user(req:Request, user_id: int):
    """Delete a super-admin user by id."""
    await AuthService().delete_super_admin(req.state.user, user_id)
    return ApiResponse(data="OK", message="User deleted successfully")


@router.get("/")
async def get_users(req:Request, limit:int = DEFAULT_LIMIT, page:int = DEFAULT_PAGE, search:str = ""):
    """Paginated, searchable list of admin users (page is clamped to >= 1)."""
    if page < 1:
        page = 1
    offset = (page - 1) * limit
    users = await AuthService().get_admins(req.state.user, limit, offset, search)
    return ApiResponse(data=users, message="Users retrieved successfully")


@router.post("/master-data")
async def create_master_data(appointment_type: MasterAppointmentTypeBase):
    """Create a master appointment type."""
    await MasterAppointmentServices().create_master_appointment_type(appointment_type)
    return ApiResponse(data="OK", message="Master data created successfully")


@router.delete("/master-data/{master_appointment_type_id}")
async def delete_master_data(master_appointment_type_id: int):
    """Delete a master appointment type by id."""
    await MasterAppointmentServices().delete_master_appointment_type(master_appointment_type_id)
    return ApiResponse(data="OK", message="Master data deleted successfully")


@router.put("/master-data/{master_appointment_type_id}")
async def update_master_data(master_appointment_type_id: int, appointment_type: MasterAppointmentTypeBase):
    """Replace a master appointment type by id."""
    await MasterAppointmentServices().update_master_appointment_type(master_appointment_type_id, appointment_type)
    return ApiResponse(data="OK", message="Master data updated successfully")


@router.get("/master-data")
async def get_master_data(
    limit: int = DEFAULT_LIMIT,
    page: int = DEFAULT_PAGE,
    search: str = ""
):
    """Paginated, searchable list of master appointment types."""
    if page < 1:
        page = 1
    offset = (page - 1) * limit
    appointment_types = await MasterAppointmentServices().get_master_appointment_types(limit, offset, search)
    return ApiResponse(data=appointment_types, message="Master data retrieved successfully")


@router.get("/clinic/offers")
async def get_clinic_offers(
    req:Request,
    page: int = DEFAULT_PAGE,
    limit: int = DEFAULT_LIMIT,
    search:str = ""
):
    """Paginated, searchable list of clinic offers visible to the acting user."""
    if page < 1:
        page = 1
    offset = (page - 1) * limit
    clinic_offers = await ClinicServices().get_clinic_offers(req.state.user, limit, offset, search)
    return ApiResponse(data=clinic_offers, message="Clinic offers retrieved successfully")


@router.post("/clinic/offer")
async def create_clinic_offer(
    req:Request,
    clinic_offer: ClinicOffersBase
):
    """Create a clinic offer on behalf of the acting user."""
    await ClinicServices().create_clinic_offer(req.state.user, clinic_offer)
    return ApiResponse(data="OK", message="Clinic offer created successfully")


@router.put("/clinic/offer/{clinic_offer_id}")
async def update_clinic_offer(
    req:Request,
    clinic_offer_id: int,
    clinic_offer: ClinicOffersBase
):
    """Update a clinic offer by id."""
    await ClinicServices().update_clinic_offer(req.state.user, clinic_offer_id, clinic_offer)
    return ApiResponse(data="OK", message="Clinic offer updated successfully")


@router.delete("/clinic/offer/{clinic_offer_id}")
async def delete_clinic_offer(
    req:Request,
    clinic_offer_id: int
):
    """Delete a clinic offer by id."""
    await ClinicServices().delete_clinic_offer(req.state.user, clinic_offer_id)
    return ApiResponse(data="OK", message="Clinic offer deleted successfully")

18
apis/endpoints/agent.py Normal file
View File

@ -0,0 +1,18 @@
'''
Routes consumed by the voice agent (bot).

These are mounted under /agent behind a shared-secret dependency and are
hidden from the OpenAPI schema (see apis/__init__.py), i.e. service-to-service
endpoints, not user-facing ones.
'''
from fastapi import APIRouter
from services.agentServices import AgentServices

router = APIRouter()


@router.get("/clinic/docs/{clinic_id}")
async def get_clinic_doctors_with_appointments(clinic_id: int):
    """Return a clinic's doctors together with their appointments."""
    return await AgentServices().get_clinic_doctors_with_appointments(clinic_id)


@router.get("/clinic/{phone}")
async def get_clinic_by_phone(phone: str):
    """Look a clinic up by its phone number (used for inbound-call routing)."""
    return await AgentServices().get_clinic_by_phone(phone)

View File

@ -0,0 +1,117 @@
from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.orm import Session
from typing import List
# database
from database import get_db
from schemas.ResponseSchemas import AppointmentDetailed, AppointmentSchema
from models.Appointments import Appointments
from schemas.CreateSchemas import AppointmentCreate, AppointmentCreateWithNames
from models.Doctors import Doctors
from models.Patients import Patients
router = APIRouter()


@router.get(
    "/", response_model=List[AppointmentDetailed], status_code=status.HTTP_200_OK
)
def get_appointments(
    doc_name: str | None = None,
    patient_name: str | None = None,
    skip: int = 0,
    limit: int = 100,
    db: Session = Depends(get_db),
):
    """
    Get a list of appointments with optional pagination.
    """
    try:
        appointment_query = db.query(Appointments)
        # Optional case-insensitive substring filters on the related rows.
        if doc_name:
            appointment_query = appointment_query.join(Appointments.doctor).filter(
                Doctors.name.ilike(f"%{doc_name}%")
            )
        if patient_name:
            appointment_query = appointment_query.join(Appointments.patient).filter(
                Patients.name.ilike(f"%{patient_name}%")
            )
        return appointment_query.offset(skip).limit(limit).all()
    except Exception as exc:
        raise HTTPException(
            status_code=500,
            detail=str(exc.__cause__),
        ) from exc
# @router.post("/", response_model=AppointmentSchema, status_code=status.HTTP_201_CREATED)
# def create_appointment(appointment: AppointmentCreate, db: Session = Depends(get_db)):
# """
# Create a new appointment.
# """
# try:
# db_appointment = Appointments(**appointment.model_dump())
# db.add(db_appointment)
# db.commit()
# db.refresh(db_appointment)
# return db_appointment
# except Exception as e:
# db.rollback()
# raise HTTPException(
# status_code=500,
# detail=str(e.__cause__),
# ) from e
@router.post("/", response_model=AppointmentSchema, status_code=status.HTTP_201_CREATED)
def create_appointment_with_names(
appointment: AppointmentCreateWithNames, db: Session = Depends(get_db)
):
"""
Create a new appointment using doctor name and patient name instead of IDs.
"""
try:
# Find doctor by name
doctor = (
db.query(Doctors).filter(Doctors.name == appointment.doctor_name).first()
)
if not doctor:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"Doctor with name '{appointment.doctor_name}' not found",
)
# Find patient by name
patient = (
db.query(Patients).filter(Patients.name == appointment.patient_name).first()
)
if not patient:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"Patient with name '{appointment.patient_name}' not found",
)
# Create appointment with doctor_id and patient_id
db_appointment = Appointments(
doctor_id=doctor.id,
patient_id=patient.id,
appointment_time=appointment.appointment_time,
status=appointment.status,
)
db.add(db_appointment)
db.commit()
db.refresh(db_appointment)
return db_appointment
except HTTPException:
db.rollback()
raise
except Exception as e:
db.rollback()
raise HTTPException(
status_code=500,
detail=str(e.__cause__),
) from e

68
apis/endpoints/auth.py Normal file
View File

@ -0,0 +1,68 @@
from fastapi import APIRouter, BackgroundTasks, Request, status
from services.authService import AuthService
from schemas.CreateSchemas import UserCreate
from schemas.ApiResponse import ApiResponse
from schemas.BaseSchemas import AuthBase, AuthOTP, ResetPasswordBase
from services.clinicServices import ClinicServices
router = APIRouter()


@router.post("/login")
async def login(data: AuthBase):
    """Authenticate credentials and return the token payload from AuthService."""
    token = await AuthService().login(data)
    return ApiResponse(
        data=token,
        message="Login successful"
    )


@router.post("/register")
async def register(user_data: UserCreate, background_tasks: BackgroundTasks):
    """Register a new user; follow-up work (e.g. emails) runs via background_tasks."""
    response = await AuthService().register(user_data, background_tasks)
    return ApiResponse(
        data=response,
        message="User registered successfully"
    )


@router.get("/clinic/latest-id")
async def get_latest_clinic_id():
    """Return the most recently assigned clinic id."""
    clinic_id = await ClinicServices().get_latest_clinic_id()
    return ApiResponse(
        data=clinic_id,
        message="Latest clinic ID retrieved successfully"
    )


@router.post('/admin/forget-password')
async def forget_password(email: str):
    """Trigger a password-reset email.

    NOTE(review): a bare ``str`` parameter on a POST is read from the query
    string by FastAPI, not the body — confirm that is what clients send.
    """
    await AuthService().forget_password(email)
    return ApiResponse(data="OK", message="Password reset email sent successfully")


@router.post('/admin/reset-password')
async def reset_password(data: ResetPasswordBase):
    """Complete a password reset given a reset token and the new password."""
    await AuthService().reset_password(data.token, data.password)
    return ApiResponse(data="OK", message="Password reset successfully")


@router.post("/send-otp")
async def send_otp(email: str):
    """Send a one-time password to the given email (query parameter, see above)."""
    await AuthService().send_otp(email)
    return ApiResponse(
        data="OK",
        message="OTP sent successfully"
    )


@router.post("/verify-otp")
async def verify_otp(data: AuthOTP):
    """Verify a previously sent OTP; the service raises on mismatch."""
    await AuthService().verify_otp(data)
    return ApiResponse(
        data="OK",
        message="OTP verified successfully"
    )
@router.get("/is-valid-domain")
async def is_valid_domain(req:Request):
host = req.client.host
print(host)
is_valid = await ClinicServices().is_valid_domain(host)
return status.HTTP_200_OK if is_valid else status.HTTP_404_NOT_FOUND

View File

@ -0,0 +1,41 @@
from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.orm import Session
from typing import List
# database
from database import get_db
from models.Calendar import Calenders
from schemas.CreateSchemas import CalendarCreate
from schemas.ResponseSchemas import Calendar
router = APIRouter()


@router.get("/", response_model=List[Calendar])
def get_calendar_events(skip: int = 0, limit: int = 100, db: Session = Depends(get_db)):
    """
    Get a list of calendar events with optional pagination.

    Fix: the original queried the *string* ``"CalendarEvents"`` (a leftover
    placeholder), which SQLAlchemy rejects at runtime; query the imported
    ``Calenders`` model — the same entity this module's POST endpoint writes.
    """
    events = db.query(Calenders).offset(skip).limit(limit).all()
    return events
@router.post("/", response_model=Calendar, status_code=status.HTTP_201_CREATED)
def create_calendar_event(event: CalendarCreate, db: Session = Depends(get_db)):
"""
Create a new calendar event.
"""
try:
db_event = Calenders(**event.model_dump())
db.add(db_event)
db.commit()
db.refresh(db_event)
return db_event
except Exception as e:
db.rollback()
raise HTTPException(
status_code=500,
detail=str(e.__cause__),
) from e

View File

@ -0,0 +1,42 @@
import datetime
from typing import Optional
from fastapi import APIRouter, BackgroundTasks, Depends
from services.callTranscripts import CallTranscriptServices
from middleware.auth_dependency import auth_required
from utils.constants import DEFAULT_LIMIT, DEFAULT_ORDER, DEFAULT_ORDER_BY, DEFAULT_PAGE
from schemas.ApiResponse import ApiResponse
from schemas.CreateSchemas import CallTranscriptsCreate
router = APIRouter()


@router.get("/", dependencies=[Depends(auth_required)])
async def get_call_transcripts(limit: int = DEFAULT_LIMIT, page: int = DEFAULT_PAGE, search: str = "", orderBy: str = DEFAULT_ORDER, order: str = DEFAULT_ORDER_BY, startDate: Optional[datetime.datetime] = None, endDate: Optional[datetime.datetime] = None):
    """List call transcripts with pagination, search, ordering, and an optional date window.

    Fix: the original only special-cased ``page == 0``, so a negative page
    produced a negative offset; clamp every value below 1, matching the
    other paginated endpoints in this package.

    NOTE(review): ``orderBy`` defaults to DEFAULT_ORDER and ``order`` to
    DEFAULT_ORDER_BY — the constants look swapped; confirm against
    utils.constants.
    """
    if page < 1:
        page = 1
    offset = (page - 1) * limit
    response = await CallTranscriptServices().get_call_transcripts(limit, offset, search, orderBy, order, startDate, endDate)
    return ApiResponse(data=response, message="Call transcripts retrieved successfully")
@router.get("/{key_id}", dependencies=[Depends(auth_required)])
async def download_call_transcript(key_id: str):
service = CallTranscriptServices()
response = await service.download_call_transcript(key_id)
return ApiResponse(data=response, message="Call transcript downloaded successfully")
@router.post("/")
async def create_call_transcript(data: CallTranscriptsCreate):
service = CallTranscriptServices()
await service.create_call_transcript(data)
return ApiResponse(data="OK", message="Call transcript created successfully")
@router.post("/bulk-download", dependencies=[Depends(auth_required)])
async def bulk_download_call_transcripts(
key_ids: list[int], background_tasks: BackgroundTasks
):
service = CallTranscriptServices()
response = await service.bulk_download_call_transcripts(key_ids, background_tasks)
return response

View File

@ -0,0 +1,41 @@
from fastapi import APIRouter
from schemas.ApiResponse import ApiResponse
from schemas.CreateSchemas import ClinicDoctorCreate
from schemas.UpdateSchemas import ClinicDoctorUpdate
from services.clinicDoctorsServices import ClinicDoctorsServices
from fastapi import Request
from utils.constants import DEFAULT_ORDER, DEFAULT_ORDER_BY, DEFAULT_PAGE, DEFAULT_LIMIT
router = APIRouter()


@router.get("/")
async def get_clinic_doctors(
    req:Request,
    limit:int= DEFAULT_LIMIT,
    page:int = DEFAULT_PAGE,
    search:str = "",
    sort_by:str = DEFAULT_ORDER,
    sort_order:str = DEFAULT_ORDER_BY
):
    """Paginated, searchable, sortable list of doctors for the caller's clinic.

    NOTE(review): the defaults look swapped — ``sort_by`` gets DEFAULT_ORDER
    and ``sort_order`` gets DEFAULT_ORDER_BY; confirm against utils.constants.
    """
    if page < 1:
        page = 1
    offset = (page - 1) * limit
    user = req.state.user
    # NOTE(review): assumes the user has created at least one clinic; an
    # empty created_clinics list raises IndexError (-> 500). Confirm the
    # auth layer guarantees this.
    clinic_id = user["created_clinics"][0]["id"]
    clinic_doctors = await ClinicDoctorsServices().get_clinic_doctors(clinic_id, limit, offset, search, sort_by, sort_order)
    return ApiResponse(data=clinic_doctors, message="Clinic doctors retrieved successfully")


@router.post("/")
async def create_clinic_doctor(req:Request, clinic_doctor: ClinicDoctorCreate):
    """Attach a doctor to the acting user's clinic."""
    await ClinicDoctorsServices().create_clinic_doctor(req.state.user, clinic_doctor)
    return ApiResponse(data="OK", message="Clinic doctor created successfully")


@router.put("/{clinic_doctor_id}")
async def update_clinic_doctor(req:Request, clinic_doctor_id: int, clinic_doctor: ClinicDoctorUpdate):
    """Update a clinic-doctor association by id."""
    await ClinicDoctorsServices().update_clinic_doctor(req.state.user, clinic_doctor_id, clinic_doctor)
    return ApiResponse(data="OK", message="Clinic doctor updated successfully")


@router.delete("/{clinic_doctor_id}")
async def delete_clinic_doctor(clinic_doctor_id: int):
    """Remove a clinic-doctor association by id (no acting-user check here)."""
    await ClinicDoctorsServices().delete_clinic_doctor(clinic_doctor_id)
    return ApiResponse(data="OK", message="Clinic doctor deleted successfully")

55
apis/endpoints/clinics.py Normal file
View File

@ -0,0 +1,55 @@
from typing import List, Literal, Union
from fastapi import APIRouter, status, Request
# schemas
from schemas.ResponseSchemas import Clinic
from schemas.UpdateSchemas import ClinicUpdate
# services
from services.clinicServices import ClinicServices
# Constants
from schemas.ApiResponse import ApiResponse
from interface.common_response import CommonResponse
from schemas.BaseSchemas import ClinicOffersBase
from utils.constants import DEFAULT_PAGE, DEFAULT_SKIP, DEFAULT_LIMIT
router = APIRouter()


@router.get("/")
async def get_clinics(
    req:Request,
    page: int = DEFAULT_PAGE,
    limit: int = DEFAULT_LIMIT,
    filter_type: Union[Literal["UNREGISTERED"], Literal["REGISTERED"]] = "UNREGISTERED",
    search:str = ""
):
    """Paginated clinic listing filtered to REGISTERED or UNREGISTERED clinics."""
    if page < 1:
        page = 1
    offset = (page - 1) * limit
    clinics = await ClinicServices().get_clinics(req.state.user, limit, offset, filter_type, search)
    return ApiResponse(data=clinics, message="Clinics retrieved successfully" )


# Declared before the /{clinic_id} catch-all so "verified-files" is not
# captured as a clinic_id.
@router.get("/verified-files/{clinic_id}")
async def get_verified_files(clinic_id: int):
    """Fetch a clinic record for its verification files.

    NOTE(review): identical to GET /{clinic_id} below — both call
    get_clinic_by_id; confirm whether a dedicated verified-files payload
    was intended.
    """
    clinic = await ClinicServices().get_clinic_by_id(clinic_id)
    return ApiResponse(data=clinic, message="Clinic retrieved successfully")


@router.get("/{clinic_id}")
async def get_clinic(clinic_id: int):
    """Fetch a single clinic by id."""
    clinic = await ClinicServices().get_clinic_by_id(clinic_id)
    return ApiResponse(data=clinic, message="Clinic retrieved successfully")


@router.put("/{clinic_id}")
async def update_clinic(
    req:Request,
    clinic_id: int, clinic: ClinicUpdate
):
    """Update a clinic; returns the updated record."""
    clinic = await ClinicServices().update_clinic(req.state.user, clinic_id, clinic)
    return ApiResponse(data=clinic, message="Clinic updated successfully")


@router.delete("/{clinic_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_clinic(clinic_id: int):
    """Delete a clinic by id.

    NOTE(review): declares 204 No Content but returns a body — FastAPI will
    attempt to serialize it; confirm whether the body or the 204 should go.
    """
    await ClinicServices().delete_clinic(clinic_id)
    return ApiResponse(data="OK", message="Clinic deleted successfully")

View File

@ -0,0 +1,24 @@
from fastapi import APIRouter, Request
from services.dashboardService import DashboardService
from schemas.ApiResponse import ApiResponse
from enums.enums import UserType
from schemas.CreateSchemas import SignupPricingMasterCreate
router = APIRouter()


@router.get("/")
async def get_clinic_doctor_status_count(req:Request):
    """Dashboard counters scoped to the acting user."""
    counts = await DashboardService().get_dashboard_counts(req.state.user)
    return ApiResponse(data=counts, message="Counts fetched successfully")


@router.post("/signup-pricing-master")
async def update_signup_pricing_master(req:Request, signup_pricing_master:SignupPricingMasterCreate):
    """Create/update the signup pricing master record (acting user forwarded to the service)."""
    user = req.state.user
    response = await DashboardService().update_signup_pricing_master(user, signup_pricing_master)
    return ApiResponse(data=response, message="Signup pricing master updated successfully")


@router.get("/signup-pricing-master")
async def get_signup_pricing_master():
    """Return the current signup pricing master record (no auth parameters needed)."""
    pricing = await DashboardService().get_signup_pricing_master()
    return ApiResponse(data=pricing, message="Signup pricing master fetched successfully")

143
apis/endpoints/doctors.py Normal file
View File

@ -0,0 +1,143 @@
from asyncio.log import logger
from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.orm import Session
from typing import List, Dict
from datetime import datetime, timedelta
from sqlalchemy import and_
# database
from database import get_db
# schemas
from models.Doctors import Doctors
from models.Appointments import Appointments
from models.Calendar import Calenders
from schemas.ResponseSchemas import (
Doctor,
DoctorWithAppointments,
DoctorWithCalendar,
CalendarTimeSchema,
)
from schemas.CreateSchemas import DoctorCreate
from schemas.UpdateSchemas import DoctorUpdate
from enums.enums import AppointmentStatus
router = APIRouter()


@router.post("/", response_model=Doctor, status_code=status.HTTP_201_CREATED)
def create_doctor(doctor: DoctorCreate, db: Session = Depends(get_db)):
    """Persist a new doctor row and return it."""
    try:
        new_doctor = Doctors(**doctor.model_dump())
        db.add(new_doctor)
        db.commit()
        db.refresh(new_doctor)
        return new_doctor
    except Exception as exc:
        # Undo the pending insert before surfacing the failure as a 500.
        db.rollback()
        raise HTTPException(
            status_code=500,
            detail=str(exc.__cause__),
        ) from exc
@router.get("/", response_model=List[DoctorWithCalendar])
def read_doctors(
doctor_name: str | None = None,
skip: int = 0,
limit: int = 100,
db: Session = Depends(get_db),
):
query = db.query(Doctors)
if doctor_name:
query = query.filter(Doctors.name.ilike(f"%{doctor_name}%"))
doctors = query.offset(skip).limit(limit).all()
return doctors
@router.get("/available-slots/{doctor_name}", response_model=Dict[str, List[str]])
def get_available_slots(
doctor_name: str | None = None,
date: str | None = datetime.now().strftime("%Y-%m-%d"),
db: Session = Depends(get_db),
):
"""
Get available slots for a doctor on a specific date.
date format: YYYY-MM-DD
"""
# Get the doctor
print(f"-----------------doctor_name: {doctor_name}")
doctor = db.query(Doctors).filter(Doctors.name.ilike(f"%{doctor_name}%")).first()
if not doctor:
raise HTTPException(status_code=404, detail="Doctor not found")
# Get all calendar slots for the doctor
calendar_slots = db.query(Calenders).filter(Calenders.doc_id == doctor.id).all()
if not calendar_slots:
return {"available_slots": []}
available_slots = [slot.time for slot in calendar_slots]
try:
target_date = datetime.strptime(date, "%Y-%m-%d").date()
except ValueError:
raise HTTPException(
status_code=400, detail="Invalid date format. Use YYYY-MM-DD"
)
# Get all appointments for the doctor on the specified date
appointments = (
db.query(Appointments)
.filter(
and_(
Appointments.doctor_id == doctor.id,
Appointments.appointment_time >= target_date,
Appointments.appointment_time < target_date + timedelta(days=1),
)
)
.all()
)
# Remove slots that have appointments
for appointment in appointments:
appointment_time = appointment.appointment_time.strftime("%H:%M")
if appointment_time in available_slots and (
not appointment.status == AppointmentStatus.COMPLETED
):
available_slots.remove(appointment_time)
return {"available_slots": available_slots}
# @router.get("/{doctor_name}", response_model=DoctorWithAppointments)
# def read_doctor(doctor_name: str, db: Session = Depends(get_db)):
# db_doctor = db.query(Doctors).filter(Doctors.name.ilike(f"%{doctor_name}%")).all()
# return db_doctor
@router.put("/{doctor_id}", response_model=Doctor)
def update_doctor(doctor_id: int, doctor: DoctorUpdate, db: Session = Depends(get_db)):
db_doctor = db.query(Doctors).filter(Doctors.id == doctor_id).first()
if db_doctor is None:
raise HTTPException(status_code=404, detail="Doctor not found")
update_data = doctor.model_dump(exclude_unset=True)
for key, value in update_data.items():
setattr(db_doctor, key, value)
db.commit()
db.refresh(db_doctor)
return db_doctor
@router.delete("/{doctor_id}", status_code=status.HTTP_204_NO_CONTENT)
def delete_doctor(doctor_id: int, db: Session = Depends(get_db)):
db_doctor = db.query(Doctors).filter(Doctors.id == doctor_id).first()
if db_doctor is None:
raise HTTPException(status_code=404, detail="Doctor not found")
db.delete(db_doctor)
db.commit()
return None

View File

@ -0,0 +1,37 @@
from fastapi import APIRouter
from utils.constants import DEFAULT_LIMIT, DEFAULT_PAGE
from services.notificationServices import NotificationServices
from schemas.ApiResponse import ApiResponse
from fastapi import Request
router = APIRouter()


@router.get("/")
def get_notifications(request: Request, limit: int = DEFAULT_LIMIT, page: int = DEFAULT_PAGE):
    """Return a page of notifications for the authenticated user.

    Fix: the original guarded ``page < 0``, so ``page == 0`` slipped through
    and produced a negative offset (``-limit``); clamp anything below 1
    instead, matching the other paginated endpoints in this package.
    """
    if page < 1:
        page = 1
    offset = (page - 1) * limit
    notifications = NotificationServices().getNotifications(request.state.user["id"], limit, offset)
    return ApiResponse(data=notifications, message="Notifications retrieved successfully")
@router.delete("/")
def delete_notification(notification_id: int):
NotificationServices().deleteNotification(notification_id)
return ApiResponse(data="OK", message="Notification deleted successfully")
@router.put("/")
def update_notification_status(notification_id: int):
NotificationServices().updateNotificationStatus(notification_id)
return ApiResponse(data="OK", message="Notification status updated successfully")
@router.post("/")
def send_notification(title: str, message: str, sender_id: int, receiver_id: int):
NotificationServices().createNotification(title, message, sender_id, receiver_id)
return ApiResponse(data="OK", message="Notification sent successfully")
@router.post("/fcm")
def send_fcm_notification(req: Request, token: str):
NotificationServices().createOrUpdateFCMToken(req.state.user["id"], token)
return ApiResponse(data="OK", message="FCM Notification sent successfully")

View File

@ -0,0 +1,50 @@
from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.orm import Session
from typing import List
# database
from database import get_db
from models.Patients import Patients
from schemas.CreateSchemas import PatientCreate
from schemas.ResponseSchemas import Patient
router = APIRouter()
@router.get("/", response_model=List[Patient])
def read_patients(
    name: str | None = None,
    dob: str | None = None,
    skip: int = 0,
    limit: int = 100,
    db: Session = Depends(get_db),
):
    """List patients with optional pagination.

    Optional filters: case-insensitive partial match on name, exact match on
    date of birth.
    """
    conditions = []
    if name:
        conditions.append(Patients.name.ilike(f"%{name}%"))
    if dob:
        conditions.append(Patients.dob == dob)
    query = db.query(Patients)
    for condition in conditions:
        query = query.filter(condition)
    return query.offset(skip).limit(limit).all()
@router.post("/", response_model=Patient, status_code=status.HTTP_201_CREATED)
def create_patient(patient: PatientCreate, db: Session = Depends(get_db)):
    """Create a new patient.

    Rolls back on any persistence failure and maps it to HTTP 500. The
    original used `str(e.__cause__)` unconditionally, which produced the
    literal detail string "None" for exceptions without a driver-level cause;
    we now fall back to the exception itself.
    NOTE(review): exposing raw DB error text to clients may leak schema
    details — consider a generic message in production.
    """
    try:
        db_patient = Patients(**patient.model_dump())
        db.add(db_patient)
        db.commit()
        db.refresh(db_patient)
        return db_patient
    except Exception as e:
        db.rollback()
        detail = str(e.__cause__) if e.__cause__ is not None else str(e)
        raise HTTPException(
            status_code=500,
            detail=detail,
        ) from e

14
apis/endpoints/s3.py Normal file
View File

@ -0,0 +1,14 @@
from fastapi import APIRouter, status
from fastapi import Request
from services.s3Service import upload_file as upload_file_service
from schemas.ApiResponse import ApiResponse
from schemas.CreateSchemas import S3Create
router = APIRouter()
@router.post("/")
async def upload_file(data: S3Create):
    """Upload a file through the S3 service and wrap the result in an ApiResponse."""
    upload_result = await upload_file_service(data.folder, data.file_name)
    return ApiResponse(data=upload_result, message="File uploaded successfully")

14
apis/endpoints/sns.py Normal file
View File

@ -0,0 +1,14 @@
from typing import Optional
from fastapi import APIRouter, Body, Header
from fastapi import Request
from services.authService import AuthService
from schemas.BaseSchemas import SNSBase
import json
router = APIRouter()
@router.post("/")
async def send_sms(request: Request):
    """Receive a raw SNS callback payload and hand it to the auth service."""
    raw_body = await request.body()
    payload = json.loads(raw_body)
    AuthService().blockEmailSNS(payload)
    return "OK"

47
apis/endpoints/stripe.py Normal file
View File

@ -0,0 +1,47 @@
from fastapi import APIRouter, Depends, Request
from services.stripeServices import StripeServices
from middleware.auth_dependency import auth_required
from schemas.ApiResponse import ApiResponse
router = APIRouter()
stripe_service = StripeServices()
# @router.post("/create-checkout-session")
# async def create_checkout_session(user_id: int):
# return await stripe_service.create_checkout_session(1)
@router.post("/create-subscription-checkout")
async def create_subscription_checkout():
    # NOTE(review): the fee breakdown, clinic_id, Stripe account_id and
    # customer_id are all hard-coded — this reads like a development stub.
    # Confirm intent; these should come from the authenticated clinic.
    return await stripe_service.create_subscription_checkout(
        fees_to_be={
            "per_call_charges": 10,
            "setup_fees": 100,
            "subscription_fees": 100,
            "total": 210
        },
        clinic_id=1,
        account_id="acct_1RT1UFPTNqn2kWQ8",
        customer_id="cus_SNn49FDltUcSLP"
    )
@router.get("/create-stripe-account-link", dependencies=[Depends(auth_required)])
async def create_stripe_account_link(req: Request):
    """Create a Stripe Connect onboarding link for the authenticated user."""
    account_link = await stripe_service.create_stripe_account_link(req.state.user)
    return ApiResponse(data=account_link, message="Stripe account link created successfully")
@router.get("/get-invoice", dependencies=[Depends(auth_required)])
async def get_invoice(req: Request):
    """Retrieve an invoice URL for the authenticated user."""
    url = await stripe_service.get_invoice(req.state.user)
    return ApiResponse(data=url, message="Invoice URL retrieved successfully")
@router.post("/create-payment-session", dependencies=[Depends(auth_required)])
async def create_payment_session(req: Request):
    """Start a Stripe payment session for the authenticated user."""
    payment_session = await stripe_service.create_payment_session(req.state.user)
    return ApiResponse(data=payment_session, message="Payment session created successfully")
@router.post("/webhook")
async def stripe_webhook(request: Request):
    # Stripe webhook receiver; signature verification and event dispatch are
    # delegated to the service layer.
    await stripe_service.handle_webhook(request)
    return "OK"

65
apis/endpoints/users.py Normal file
View File

@ -0,0 +1,65 @@
from fastapi import APIRouter, Request, Depends
from middleware.auth_dependency import auth_required
from services.userServices import UserServices
from schemas.ApiResponse import ApiResponse
from schemas.UpdateSchemas import UserUpdate
from utils.constants import DEFAULT_LIMIT, DEFAULT_PAGE
router = APIRouter()
@router.get("/")
async def get_users(limit: int = DEFAULT_LIMIT, page: int = DEFAULT_PAGE, search: str = ""):
    """List users with pagination and optional search.

    Pages are 1-based. The original only fixed up `page == 0`, so a negative
    page still produced a negative SQL OFFSET; any page below 1 is now
    clamped to 1.
    """
    if page < 1:
        page = 1
    offset = (page - 1) * limit
    user = await UserServices().get_users(limit, offset, search)
    return ApiResponse(
        data=user,
        message="User fetched successfully"
    )
@router.get("/me")
async def get_user(request: Request):
    """Return the authenticated user's own profile."""
    current_user_id = request.state.user["id"]
    profile = await UserServices().get_user(current_user_id)
    return ApiResponse(data=profile, message="User fetched successfully")
@router.get("/{user_id}")
async def get_user_by_id(request: Request, user_id: int):
    """Fetch a single user by id.

    Renamed from `get_user`: the module defined two functions with that name,
    so this one silently shadowed the `/me` handler at module level. FastAPI
    registers routes at decoration time, so both routes worked, but the
    shadowing breaks direct imports and confuses tooling.
    """
    user = await UserServices().get_user(user_id)
    return ApiResponse(
        data=user,
        message="User fetched successfully"
    )
@router.delete("/")
async def delete_user(request: Request):
    """Delete the authenticated user's own account."""
    own_id = request.state.user["id"]
    await UserServices().delete_user(own_id)
    return ApiResponse(data="OK", message="User deleted successfully")
@router.put("/")
async def update_user(request: Request, user_data: UserUpdate):
    """Update the authenticated user's own profile."""
    own_id = request.state.user["id"]
    updated = await UserServices().update_user(own_id, user_data)
    return ApiResponse(data=updated, message="User updated successfully")
@router.put("/{user_id}")
async def update_user_by_id(request: Request, user_id: int, user_data: UserUpdate):
    """Update an arbitrary user by id.

    Renamed from `update_user` to stop shadowing the self-update handler of
    the same name defined just above (routes were unaffected, module-level
    name resolution was not).
    """
    user = await UserServices().update_user(user_id, user_data)
    return ApiResponse(
        data=user,
        message="User updated successfully"
    )

41
database.py Normal file
View File

@ -0,0 +1,41 @@
import dotenv
dotenv.load_dotenv()
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
import os
import logging
from sqlalchemy.exc import SQLAlchemyError
engine = create_engine(
    os.getenv("DB_URL"),  # NOTE(review): None if DB_URL unset — create_engine fails at import time
    pool_pre_ping=True,  # re-validate pooled connections before handing them out
    echo=True if os.getenv("IS_DEV") == "True" else False, # Disable in production - this uses memory
    connect_args={
        # TLS required outside dev; IS_DEV is compared as the string "False"
        "sslmode": "require" if os.getenv("IS_DEV") == "False" else "disable",
        "connect_timeout": 10, # Connection timeout
    },
)
Base = declarative_base() # Base class for ORM models
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) # session factory used by get_db()
def get_db():
    """FastAPI dependency: yield a session, commit on success, roll back on error.

    Note: the commit runs for every request that completes without raising,
    including read-only handlers. The session is always closed.
    """
    db = SessionLocal()
    try:
        yield db
        db.commit() # Explicit commit
    except SQLAlchemyError as e:
        db.rollback()
        logging.error(f"Database error: {e}")
        raise
    except Exception as e:
        db.rollback()
        logging.error(f"Unexpected error: {e}")
        raise
    finally:
        db.close()

0
enums/__init__.py Normal file
View File

46
enums/enums.py Normal file
View File

@ -0,0 +1,46 @@
from enum import Enum


class AppointmentStatus(Enum):
    """Lifecycle states of an appointment."""
    PENDING = "pending"
    CONFIRMED = "confirmed"
    CANCELLED = "cancelled"
    COMPLETED = "completed"


class Integration(Enum):
    """Supported practice-management integrations.

    The module previously declared this enum twice with identical members;
    the duplicate definition has been removed.
    """
    BP = "bp"
    MEDICAL_DIRECTOR = "medical_director"


class ClinicStatus(Enum):
    """Administrative states of a clinic account."""
    ACTIVE = "active"
    INACTIVE = "inactive"
    UNDER_REVIEW = "under_review"
    REQUESTED_DOC = "requested_doc"
    REJECTED = "rejected"
    PAYMENT_DUE = "payment_due"
    SUBSCRIPTION_ENDED = "subscription_ended"


class ClinicUserRoles(Enum):
    """Roles a clinic staff user can hold."""
    DIRECTOR = "director"
    PRACTICE_MANAGER = "practice_manager"


class ClinicDoctorStatus(Enum):
    """Whether a doctor is currently active at a clinic."""
    ACTIVE = "active"
    INACTIVE = "inactive"


class ClinicDoctorType(Enum):
    """Clinical staff categories."""
    DOCTOR = "doctor"
    NURSE = "nurse"


class UserType(Enum):
    """Top-level user categories."""
    SUPER_ADMIN = "super_admin"
    CLINIC_ADMIN = "clinic_admin"


class S3FolderNameEnum(str, Enum):
    """S3 folder prefixes; str-valued so members compare equal to plain strings."""
    PROFILE = "profile"
    ASSETS = "assets"

View File

@ -1,33 +0,0 @@
import js from '@eslint/js'
import globals from 'globals'
import reactHooks from 'eslint-plugin-react-hooks'
import reactRefresh from 'eslint-plugin-react-refresh'
// Flat ESLint config for the React (Vite) frontend.
export default [
  // Never lint build output.
  { ignores: ['dist'] },
  {
    files: ['**/*.{js,jsx}'],
    languageOptions: {
      ecmaVersion: 2020,
      globals: globals.browser,
      parserOptions: {
        ecmaVersion: 'latest',
        ecmaFeatures: { jsx: true },
        sourceType: 'module',
      },
    },
    plugins: {
      'react-hooks': reactHooks,
      'react-refresh': reactRefresh,
    },
    rules: {
      ...js.configs.recommended.rules,
      ...reactHooks.configs.recommended.rules,
      // Allow intentionally-unused ALL_CAPS / PascalCase bindings.
      'no-unused-vars': ['error', { varsIgnorePattern: '^[A-Z_]' }],
      'react-refresh/only-export-components': [
        'warn',
        { allowConstantExport: true },
      ],
    },
  },
]

17
exceptions/__init__.py Normal file
View File

@ -0,0 +1,17 @@
"""Aggregate the package's exception types into one public namespace."""
from .api_exceptions import ApiException
from .business_exception import BusinessValidationException
from .validation_exception import ValidationException
from .forbidden_exception import ForbiddenException
from .internal_server_error_exception import InternalServerErrorException
from .resource_not_found_exception import ResourceNotFoundException
from .unauthorized_exception import UnauthorizedException

# Explicit public API for `from exceptions import *`.
__all__ = [
    "ApiException",
    "BusinessValidationException",
    "ValidationException",
    "ForbiddenException",
    "InternalServerErrorException",
    "ResourceNotFoundException",
    "UnauthorizedException",
]

View File

@ -0,0 +1,7 @@
class ApiException(Exception):
    """Base class for HTTP-mapped API errors.

    Carries the HTTP status code alongside the human-readable message; the
    message also becomes the standard Exception string.
    """

    def __init__(self, status_code: int, message: str):
        super().__init__(message)
        self.status_code = status_code
        self.message = message

View File

@ -0,0 +1,6 @@
class BusinessValidationException(Exception):
    """Raised when a business-logic rule is violated."""

    def __init__(self, message: str):
        super().__init__(message)
        self.message = message

129
exceptions/db_exceptions.py Normal file
View File

@ -0,0 +1,129 @@
from loguru import logger
from sqlalchemy.exc import SQLAlchemyError
from .business_exception import BusinessValidationException
from .resource_not_found_exception import ResourceNotFoundException
from .validation_exception import ValidationException
class DBExceptionHandler:
    """
    Centralized handler for database exceptions.

    Transforms low-level SQLAlchemy/driver errors into application-specific
    exceptions with user-friendly messages. The original implementation
    repeated the same ``hasattr(e, '__cause__') ...`` chain seven times and
    contained an unreachable second ``UniqueViolation`` branch; the dispatch
    is now done once on the cause's class name, preserving the original
    branch order and messages.
    """

    @staticmethod
    def _extract_detail_message(e):
        """
        Extract the PostgreSQL 'DETAIL:' line from a database exception.

        Args:
            e: The exception to extract the message from

        Returns:
            str: The detailed error message if found, None otherwise
        """
        if hasattr(e, 'args') and e.args and '\nDETAIL:' in str(e.args[0]):
            # Keep only the text after 'DETAIL:' up to the next newline.
            detailed_message = str(e.args[0]).split('\nDETAIL:')[1].strip()
            return detailed_message.split('\n')[0].strip()
        return None

    @staticmethod
    def handle_exception(e, context="database operation"):
        """
        Log *e* and re-raise it as an application-specific exception.

        Always raises; never returns.

        Args:
            e: The exception to handle
            context: A string describing the operation (for logging)

        Raises:
            ValidationException / ResourceNotFoundException /
            BusinessValidationException: custom exceptions pass through;
            everything else becomes a BusinessValidationException.
        """
        logger.error(f"Error during {context}: {str(e)}")

        # Pass through our custom exceptions unchanged.
        if isinstance(e, ValidationException):
            raise ValidationException(e.message)
        if isinstance(e, ResourceNotFoundException):
            raise ResourceNotFoundException(e.message)
        if isinstance(e, BusinessValidationException):
            raise BusinessValidationException(e.message)

        if isinstance(e, SQLAlchemyError):
            # psycopg2 surfaces the specific violation as e.__cause__;
            # dispatch on its class name (None cause -> 'NoneType', no match).
            cause_name = type(getattr(e, '__cause__', None)).__name__
            detail = DBExceptionHandler._extract_detail_message(e)
            cause_text = str(getattr(e, '__cause__', ''))

            if cause_name == 'UniqueViolation':
                if detail:
                    raise BusinessValidationException(detail)
                # Fallback: pull the offending field name out of the driver text.
                field = None
                if 'Key (' in cause_text and ')=' in cause_text:
                    field = cause_text.split('Key (')[1].split(')=')[0]
                raise BusinessValidationException(
                    f"A record with this {field or 'information'} already exists")
            if cause_name == 'ForeignKeyViolation':
                raise BusinessValidationException(
                    detail or "Referenced record does not exist")
            if cause_name == 'CheckViolation':
                raise BusinessValidationException(
                    detail or "Invalid data: failed validation check")
            if cause_name == 'NotNullViolation':
                if detail:
                    raise BusinessValidationException(detail)
                # Fallback: extract the column name from the driver text.
                field = None
                if 'column "' in cause_text and '" violates' in cause_text:
                    field = cause_text.split('column "')[1].split('" violates')[0]
                raise BusinessValidationException(
                    f"Required field {field or ''} cannot be empty")
            if cause_name in ('IntegrityError', 'OperationalError'):
                raise BusinessValidationException(
                    detail or f"Database error: {e.__class__.__name__}")

        # For any other exceptions, provide a generic message.
        raise BusinessValidationException(f"An error occurred: {e.__class__.__name__}")

View File

@ -0,0 +1,9 @@
from http import HTTPStatus
from .api_exceptions import ApiException
class ForbiddenException(ApiException):
    """Exception for forbidden access errors (HTTP 403)."""

    def __init__(self, message: str = "Forbidden"):
        super().__init__(HTTPStatus.FORBIDDEN, message)

View File

@ -0,0 +1,12 @@
from http import HTTPStatus
from .api_exceptions import ApiException
class InternalServerErrorException(ApiException):
    """Exception for internal server errors (HTTP 500); fixed generic message."""

    def __init__(self):
        super().__init__(
            HTTPStatus.INTERNAL_SERVER_ERROR,
            "An unexpected error has occurred. Please contact the administrator."
        )

View File

@ -0,0 +1,9 @@
from http import HTTPStatus
from .api_exceptions import ApiException
class ResourceNotFoundException(ApiException):
    """Exception for resource not found errors (HTTP 404)."""

    def __init__(self, message: str):
        super().__init__(HTTPStatus.NOT_FOUND, message)

View File

@ -0,0 +1,9 @@
from http import HTTPStatus
from .api_exceptions import ApiException
class UnauthorizedException(ApiException):
    """Exception for unauthorized access errors (HTTP 401)."""

    def __init__(self, message: str = "Failed to authenticate."):
        super().__init__(HTTPStatus.UNAUTHORIZED, message)

View File

@ -0,0 +1,6 @@
class ValidationException(Exception):
    """Raised when supplied data fails validation."""

    def __init__(self, message: str):
        super().__init__(message)
        self.message = message

View File

@ -1,16 +0,0 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<link rel="icon" type="image/svg+xml" href="/vite.svg" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Health Apps</title>
</head>
<body>
<div id="root"></div>
<script type="module" src="/src/main.jsx"></script>
</body>
</html>

View File

@ -0,0 +1,6 @@
from pydantic import BaseModel
from typing import Any
class CommonResponse(BaseModel):
    """Generic list-response envelope: a payload plus a count."""
    data: Any  # the returned records (shape depends on the endpoint)
    total: int  # presumably the total matching-record count for pagination — confirm against callers

104
main.py Normal file
View File

@ -0,0 +1,104 @@
import os
import dotenv
dotenv.load_dotenv()
from fastapi import FastAPI
from contextlib import asynccontextmanager
import logging
from fastapi.middleware.cors import CORSMiddleware
from fastapi.security import HTTPBearer
import stripe
# db
from database import Base, engine
# routers
from apis import api_router
# middleware
from middleware.ErrorHandlerMiddleware import ErrorHandlerMiddleware, configure_exception_handlers
from middleware.CustomRequestTypeMiddleware import TextPlainMiddleware
from services.emailService import EmailService
# Configure logging
logging.basicConfig(
level=logging.INFO,
format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
)
logger = logging.getLogger(__name__)
STRIPE_SECRET_KEY = os.getenv("STRIPE_SECRET_KEY")
STRIPE_WEBHOOK_SECRET = os.getenv("STRIPE_WEBHOOK_SECRET")
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Application startup/shutdown hook.

    On startup: create any missing DB tables, configure the Stripe client,
    and make a live API call to verify the credentials. Any failure is
    logged and re-raised, aborting startup.
    """
    logger.info("Starting application")
    try:
        # create_all only creates tables that don't already exist.
        Base.metadata.create_all(bind=engine)
        logger.info("Created database tables")
        if STRIPE_SECRET_KEY is None or STRIPE_WEBHOOK_SECRET is None:
            raise ValueError("Stripe API key or webhook secret is not set")
        stripe.api_key = STRIPE_SECRET_KEY
        stripe.api_version = "2025-04-30.basil"
        logger.info("Stripe API key set")
        # Test Stripe connection with a real API call.
        try:
            account = stripe.Account.retrieve()
            logger.info(f"Stripe connection verified - Account ID: {account.id}")
        except stripe.error.AuthenticationError as e:
            logger.error(f"Stripe authentication failed: {e}")
            raise
        except stripe.error.StripeError as e:
            logger.error(f"Stripe connection test failed: {e}")
            raise
    except Exception as e:
        # NOTE(review): this message mentions only tables but also covers
        # Stripe failures — the specific error is logged above regardless.
        logger.error(f"Error creating database tables: {e}")
        raise e
    yield
    logger.info("Stopping application")
# Define the security scheme (shown in the OpenAPI docs)
bearer_scheme = HTTPBearer(scheme_name="Bearer Authentication")

app = FastAPI(
    lifespan=lifespan,
    title="Twillio Voice API",  # NOTE(review): "Twillio" misspells "Twilio" — confirm intended branding
    description="API for Twillio Voice application",
    version="1.0.0",
    swagger_ui_parameters={"defaultModelsExpandDepth": -1}
)

# CORS middleware
# NOTE(review): wildcard origins/methods/headers — acceptable for development,
# tighten before production.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_methods=["*"],
    allow_headers=["*"],
    expose_headers=["*"],
)

# Custom request type middleware (captures text/* request bodies)
app.add_middleware(TextPlainMiddleware)

# Error handler middleware (global exception -> JSON response)
app.add_middleware(ErrorHandlerMiddleware)

# Configure exception handlers
configure_exception_handlers(app)
@app.get("/")
async def hello_world():
    """Liveness check: confirms the service is responding.

    The commented-out EmailService template-creation scaffolding that lived
    here has been removed as dead code.
    """
    return {"Hello": "World"}
# Mount all endpoint routers under the /api prefix.
app.include_router(api_router, prefix="/api")

View File

@ -0,0 +1,41 @@
from fastapi import FastAPI, Request, Response
from starlette.middleware.base import BaseHTTPMiddleware
from starlette.types import ASGIApp
class TextPlainMiddleware(BaseHTTPMiddleware):
    """Capture the raw body of text/* requests onto request.state.text_body.

    NOTE(review): the body is assigned to request.state only AFTER call_next
    returns, so route handlers cannot read text_body while processing the
    request, despite the comment below claiming otherwise — confirm intent.
    """

    def __init__(self, app: ASGIApp):
        super().__init__(app)

    async def dispatch(self, request: Request, call_next):
        # Check if content type is text/*
        content_type = request.headers.get("content-type", "")
        if content_type and content_type.startswith("text/"):
            # Store the original receive method
            original_receive = request._receive
            # Create a modified receive that will store the body
            body = b""

            async def receive():
                nonlocal body
                message = await original_receive()
                if message["type"] == "http.request":
                    body += message.get("body", b"")
                    # Update body to be empty so it won't be processed by other middleware
                    # message["body"] = b""
                return message

            # Replace the receive method
            request._receive = receive
            # Process the request
            response = await call_next(request)
            # After the response is generated, we can access the full body
            # and attach it to the request state for the route to access
            request.state.text_body = body.decode("utf-8")
            return response
        else:
            # For other content types, proceed as normal
            return await call_next(request)

View File

@ -0,0 +1,124 @@
from fastapi import Request, status
from fastapi.responses import JSONResponse
from fastapi.exceptions import RequestValidationError
from starlette.middleware.base import BaseHTTPMiddleware
from starlette.exceptions import HTTPException as StarletteHTTPException
import traceback
from exceptions import (
ApiException,
BusinessValidationException,
ValidationException
)
from schemas.ApiResponse import ApiResponse
class ErrorHandlerMiddleware(BaseHTTPMiddleware):
    """Middleware for handling exceptions globally in the application."""

    async def dispatch(self, request: Request, call_next):
        # Any exception escaping a route handler becomes a JSON error response.
        try:
            return await call_next(request)
        except Exception as exc:
            return self.handle_exception(exc)

    def handle_exception(self, exc: Exception) -> JSONResponse:
        """Translate *exc* into a JSONResponse carrying an ApiResponse body.

        NOTE(review): every branch except the first serializes the full
        traceback into the client response — an information leak outside
        development; confirm this is disabled in production.
        """
        if isinstance(exc, ApiException):
            # NOTE(review): this branch passes the ApiResponse object itself
            # as `content` (no .model_dump() like the other branches) —
            # verify JSONResponse can serialize it.
            return JSONResponse(
                status_code=exc.status_code,
                content=ApiResponse.from_api_exception(exc)
            )
        elif isinstance(exc, StarletteHTTPException):
            return JSONResponse(
                status_code=exc.status_code,
                content=ApiResponse(
                    message=str(exc.detail),
                    error=str(traceback.format_exc())
                ).model_dump(exclude_none=True)
            )
        elif isinstance(exc, (ValidationException, BusinessValidationException)):
            return JSONResponse(
                status_code=status.HTTP_400_BAD_REQUEST,
                content=ApiResponse(
                    message=str(exc),
                    error=str(traceback.format_exc())
                ).model_dump(exclude_none=True)
            )
        elif isinstance(exc, RequestValidationError):
            return JSONResponse(
                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
                content=ApiResponse(
                    message="Validation error",
                    error=str(exc.errors())
                ).model_dump(exclude_none=True)
            )
        else:
            # Unanticipated failure: respond 500 with the exception text.
            return JSONResponse(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                content=ApiResponse(
                    message=str(exc),
                    error=str(traceback.format_exc())
                ).model_dump(exclude_none=True)
            )
# Exception handlers for FastAPI
def configure_exception_handlers(app):
    """Configure exception handlers for the FastAPI application.

    These duplicate the branches in ErrorHandlerMiddleware.handle_exception;
    FastAPI invokes these handlers for exceptions raised inside routing
    (e.g. RequestValidationError), which the middleware may not see.
    NOTE(review): as in the middleware, tracebacks are returned to clients.
    """
    @app.exception_handler(ApiException)
    async def api_exception_handler(request: Request, exc: ApiException):
        return JSONResponse(
            status_code=exc.status_code,
            content=ApiResponse.from_api_exception(exc)
        )

    @app.exception_handler(RequestValidationError)
    async def validation_exception_handler(request: Request, exc: RequestValidationError):
        return JSONResponse(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            content=ApiResponse(
                message="Validation error",
                error=str(exc.errors())
            ).model_dump(exclude_none=True)
        )

    @app.exception_handler(StarletteHTTPException)
    async def http_exception_handler(request: Request, exc: StarletteHTTPException):
        return JSONResponse(
            status_code=exc.status_code,
            content=ApiResponse(
                message=str(exc.detail),
                error=str(traceback.format_exc())
            ).model_dump(exclude_none=True)
        )

    @app.exception_handler(BusinessValidationException)
    async def business_validation_exception_handler(request: Request, exc: BusinessValidationException):
        return JSONResponse(
            status_code=status.HTTP_400_BAD_REQUEST,
            content=ApiResponse(
                message=str(exc),
                error=str(traceback.format_exc())
            ).model_dump(exclude_none=True)
        )

    @app.exception_handler(ValidationException)
    async def custom_validation_exception_handler(request: Request, exc: ValidationException):
        return JSONResponse(
            status_code=status.HTTP_400_BAD_REQUEST,
            content=ApiResponse(
                message=str(exc),
                error=str(traceback.format_exc())
            ).model_dump(exclude_none=True)
        )

    @app.exception_handler(Exception)
    async def general_exception_handler(request: Request, exc: Exception):
        return JSONResponse(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            content=ApiResponse(
                message=str(exc),
                error=str(traceback.format_exc())
            ).model_dump(exclude_none=True)
        )

0
middleware/__init__.py Normal file
View File

View File

@ -0,0 +1,27 @@
from fastapi import HTTPException, Depends
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
from services.jwtService import verify_jwt_token
from services.userServices import UserServices
from fastapi import Request
security = HTTPBearer()
async def auth_required(request: Request, credentials: HTTPAuthorizationCredentials = Depends(security)):
    """
    Dependency function to verify JWT token for protected routes.

    On success, stores the resolved user and raw token payload on
    request.state for downstream handlers, and returns True.
    Raises HTTP 401 for a non-Bearer scheme or an invalid token.
    """
    if credentials.scheme != "Bearer":
        raise HTTPException(status_code=401, detail="Invalid authentication scheme")
    payload = verify_jwt_token(credentials.credentials)
    if payload is None:
        raise HTTPException(status_code=401, detail="Invalid authentication token")
    # Get user from database; assumes the JWT payload carries an "id" claim —
    # a missing claim would raise KeyError here. TODO confirm token shape.
    user = await UserServices().get_user(payload["id"])
    # set user to request state
    request.state.user = user
    request.state.payload = payload
    return True

29
middleware/auth_secret.py Normal file
View File

@ -0,0 +1,29 @@
"""
Authentication middleware and dependency for agent (bot) requests.
Validates the presence and correctness of the X-Agent-Secret header.
"""
import os
from fastapi import HTTPException, status, Header
from typing import Optional
from dotenv import load_dotenv
# Load environment variables
load_dotenv()
# Get the secret key from environment variables
AGENT_SECRET_KEY = os.getenv("AGENT_SECRET_KEY")
if not AGENT_SECRET_KEY:
raise ValueError("AGENT_SECRET_KEY environment variable not set")
async def verify_secret(x_agent_secret: Optional[str] = Header(None, alias="X-Agent-Secret")):
    """
    Dependency function to verify the X-Agent-Secret header.
    Can be used with Depends() in FastAPI route dependencies.

    Uses secrets.compare_digest for a constant-time comparison so the check
    cannot leak secret prefixes through response timing (the original used a
    plain `!=`).
    """
    import secrets  # stdlib; local import keeps the module import list untouched

    if not x_agent_secret or not secrets.compare_digest(x_agent_secret, AGENT_SECRET_KEY):
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid or missing X-Agent-Secret header",
            headers={"WWW-Authenticate": "Bearer"},
        )
    return True

1
migrations/README Normal file
View File

@ -0,0 +1 @@
Generic single-database configuration.

83
migrations/env.py Normal file
View File

@ -0,0 +1,83 @@
from logging.config import fileConfig
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from alembic import context
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
fileConfig(config.config_file_name)
# add your model's MetaData object here
from database import Base, engine
from models import *
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = Base.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.
    """
    url = config.get_main_option("sqlalchemy.url")  # taken from alembic.ini
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,  # render bound parameters inline in the emitted SQL
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.
    """
    # Build the engine from the [alembic] section of alembic.ini; NullPool
    # because migrations need only one short-lived connection.
    connectable = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection, target_metadata=target_metadata
        )

        with context.begin_transaction():
            context.run_migrations()
# Offline mode is selected by `alembic upgrade --sql` (SQL-script generation).
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

28
migrations/script.py.mako Normal file
View File

@ -0,0 +1,28 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
"""Upgrade schema."""
${upgrades if upgrades else "pass"}
def downgrade() -> None:
"""Downgrade schema."""
${downgrades if downgrades else "pass"}

View File

@ -0,0 +1,127 @@
"""fix enum clinic doctor
Revision ID: 0ce7107c1910
Revises: a3a9b7d17bdd
Create Date: 2025-05-13 12:20:49.384154
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from sqlalchemy import text
# revision identifiers, used by Alembic.
revision: str = '0ce7107c1910'
down_revision: Union[str, None] = 'a3a9b7d17bdd'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Upgrade schema: move clinic_doctors.role from the clinicuserroles enum
    to the new clinicdoctortype enum, migrating existing rows.

    Change from the autogenerated version: the bare ``except:`` (which would
    also swallow KeyboardInterrupt/SystemExit) is narrowed to
    ``except Exception:``.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()
    # Check if the enum type exists before creating it (idempotent re-runs).
    result = conn.execute(text("SELECT EXISTS(SELECT 1 FROM pg_type WHERE typname = 'clinicdoctortype')"))
    enum_exists = result.scalar()

    if not enum_exists:
        # Create the new enum type if it doesn't exist
        op.execute(text("CREATE TYPE clinicdoctortype AS ENUM ('DOCTOR', 'NURSE')"))

    # Check if the new_role column already exists (partial previous run).
    inspector = sa.inspect(conn)
    columns = inspector.get_columns('clinic_doctors')
    column_names = [column['name'] for column in columns]

    if 'new_role' not in column_names:
        # Add a temporary column with the new type
        op.add_column('clinic_doctors', sa.Column('new_role', postgresql.ENUM('DOCTOR', 'NURSE', name='clinicdoctortype'), nullable=True))

    # Update the temporary column with values based on the old column
    op.execute(text("UPDATE clinic_doctors SET new_role = 'DOCTOR' WHERE role = 'DIRECTOR'"))
    op.execute(text("UPDATE clinic_doctors SET new_role = 'NURSE' WHERE role = 'PRACTICE_MANAGER'"))

    # Drop the old column and rename the new one
    op.drop_column('clinic_doctors', 'role')
    op.alter_column('clinic_doctors', 'new_role', new_column_name='role')

    # The users table may also depend on the clinicuserroles enum via
    # clinicRole; detach it before dropping the type.
    has_clinic_role = False
    try:
        user_columns = inspector.get_columns('users')
        for column in user_columns:
            if column['name'] == 'clinicRole':
                has_clinic_role = True
                break
    except Exception:
        # Table might not exist
        pass

    if has_clinic_role:
        # Rewrite the column as plain text so the enum has no dependents.
        op.add_column('users', sa.Column('temp_clinic_role', sa.String(), nullable=True))
        # Double quotes preserve the camelCase identifier in PostgreSQL.
        op.execute(text('UPDATE users SET temp_clinic_role = "clinicRole"::text'))
        op.drop_column('users', 'clinicRole')
        op.alter_column('users', 'temp_clinic_role', new_column_name='clinicRole')

    # Now we can safely drop the old enum type
    op.execute(text("DROP TYPE IF EXISTS clinicuserroles"))
    # ### end Alembic commands ###
def downgrade() -> None:
    """Downgrade schema.

    Reverses the clinic-doctor role enum migration: recreates the old
    ``clinicuserroles`` enum, converts ``clinic_doctors.role`` back to it
    (DOCTOR -> DIRECTOR, NURSE -> PRACTICE_MANAGER), restores the
    ``users.clinicRole`` column to the enum type if that column exists, and
    finally drops the ``clinicdoctortype`` enum introduced by the upgrade.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Create the old enum type
    op.execute(text("CREATE TYPE clinicuserroles AS ENUM ('DIRECTOR', 'PRACTICE_MANAGER')"))
    # Add a temporary column with the old type
    op.add_column('clinic_doctors', sa.Column('old_role', postgresql.ENUM('DIRECTOR', 'PRACTICE_MANAGER', name='clinicuserroles'), nullable=True))
    # Update the temporary column with values based on the new column
    op.execute(text("UPDATE clinic_doctors SET old_role = 'DIRECTOR' WHERE role = 'DOCTOR'"))
    op.execute(text("UPDATE clinic_doctors SET old_role = 'PRACTICE_MANAGER' WHERE role = 'NURSE'"))
    # Drop the new column and rename the old one
    op.drop_column('clinic_doctors', 'role')
    op.alter_column('clinic_doctors', 'old_role', new_column_name='role')
    # If we modified the users table in the upgrade, we need to restore it
    conn = op.get_bind()
    inspector = sa.inspect(conn)
    has_clinic_role = False
    try:
        user_columns = inspector.get_columns('users')
        for column in user_columns:
            if column['name'] == 'clinicRole':
                has_clinic_role = True
                break
    except Exception:
        # The users table might not exist in this database; nothing to restore.
        # (Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are not silently swallowed.)
        pass
    if has_clinic_role:
        # Create a temporary column with the enum type
        op.add_column('users', sa.Column('temp_clinic_role', postgresql.ENUM('DIRECTOR', 'PRACTICE_MANAGER', name='clinicuserroles'), nullable=True))
        # Copy the values (with a cast) - double quotes preserve the
        # case-sensitive column name in PostgreSQL
        op.execute(text('UPDATE users SET temp_clinic_role = "clinicRole"::clinicuserroles'))
        # Drop the old column and rename the new one
        op.drop_column('users', 'clinicRole')
        op.alter_column('users', 'temp_clinic_role', new_column_name='clinicRole')
    # Drop the enum type introduced by the upgrade
    op.execute(text("DROP TYPE IF EXISTS clinicdoctortype"))
    # ### end Alembic commands ###

View File

@ -0,0 +1,34 @@
"""clinic-user-relation
Revision ID: 402a9152a6fc
Revises: ac71b9a4b040
Create Date: 2025-05-15 12:09:28.050689
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '402a9152a6fc'
down_revision: Union[str, None] = 'ac71b9a4b040'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Upgrade schema: add clinics.creator_id referencing users.id."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('clinics', sa.Column('creator_id', sa.Integer(), nullable=True))
    # Unnamed FK: the database assigns its default constraint name.
    op.create_foreign_key(None, 'clinics', 'users', ['creator_id'], ['id'])
    # ### end Alembic commands ###
def downgrade() -> None:
    """Downgrade schema: drop clinics.creator_id and its FK to users.id."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Alembic's drop_constraint() requires an explicit name (passing None
    # raises). The upgrade created the FK unnamed, so PostgreSQL assigned
    # its default name <table>_<column>_fkey.
    op.drop_constraint('clinics_creator_id_fkey', 'clinics', type_='foreignkey')
    op.drop_column('clinics', 'creator_id')
    # ### end Alembic commands ###

View File

@ -0,0 +1,35 @@
"""updated_user_table2
Revision ID: 48785e34c37c
Revises: 7d1c821a7e05
Create Date: 2025-05-21 13:54:54.833038
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '48785e34c37c'
down_revision: Union[str, None] = '7d1c821a7e05'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Upgrade schema.

    Intentionally a no-op: the autogenerated rebuild of ix_users_username
    was reviewed and commented out.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # op.drop_index('ix_users_username', table_name='users')
    # op.create_index(op.f('ix_users_username'), 'users', ['username'], unique=False)
    pass
    # ### end Alembic commands ###
def downgrade() -> None:
    """Downgrade schema.

    The upgrade is a no-op (its autogenerated commands are commented out),
    so the downgrade must be a no-op as well: dropping and recreating
    ix_users_username here would alter an index this revision never touched.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # op.drop_index(op.f('ix_users_username'), table_name='users')
    # op.create_index('ix_users_username', 'users', ['username'], unique=True)
    pass
    # ### end Alembic commands ###

View File

@ -0,0 +1,33 @@
"""file-verification-table
Revision ID: 497238c0338d
Revises: ad47f4af583e
Create Date: 2025-05-19 16:34:54.211429
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '497238c0338d'
down_revision: Union[str, None] = 'ad47f4af583e'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Upgrade schema.

    Intentionally a no-op: the autogenerated add_column for
    clinic_file_verifications.rejection_reason was commented out
    (the column is managed elsewhere).
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # op.add_column('clinic_file_verifications', sa.Column('rejection_reason', sa.String(length=255), nullable=True))
    pass
    # ### end Alembic commands ###
def downgrade() -> None:
    """Downgrade schema.

    The upgrade is a no-op (its add_column is commented out), so the
    downgrade must not drop clinic_file_verifications.rejection_reason —
    this revision never created that column.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # op.drop_column('clinic_file_verifications', 'rejection_reason')
    pass
    # ### end Alembic commands ###

View File

@ -0,0 +1,37 @@
"""updated_enums_clinic_status
Revision ID: 5ed8ac3d258c
Revises: a19fede0cdc6
Create Date: 2025-06-02 11:11:56.589321
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '5ed8ac3d258c'
down_revision: Union[str, None] = 'a19fede0cdc6'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Upgrade schema: add 'SUBSCRIPTION_ENDED' to the clinicstatus enum."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Add new status to clinicstatus enum
    # NOTE(review): on PostgreSQL < 12, ALTER TYPE ... ADD VALUE cannot run
    # inside a transaction block, and Alembic runs migrations in a
    # transaction by default — confirm the target server version.
    op.execute("ALTER TYPE clinicstatus ADD VALUE IF NOT EXISTS 'SUBSCRIPTION_ENDED' AFTER 'PAYMENT_DUE'")
    # ### end Alembic commands ###
def downgrade() -> None:
    """Downgrade schema (intentional no-op).

    PostgreSQL cannot remove a value from an existing enum type, so the
    'SUBSCRIPTION_ENDED' value added by the upgrade is left in place.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Note: In PostgreSQL, you cannot directly remove an enum value.
    # You would need to create a new enum type, update the column to use the new type,
    # and then drop the old type. This is a complex operation and might not be needed.
    # The upgrade will be reverted when applying previous migrations.
    pass
    # ### end Alembic commands ###

View File

@ -0,0 +1,32 @@
"""updated_user_table
Revision ID: 7d1c821a7e05
Revises: ec157808ef2a
Create Date: 2025-05-21 13:51:39.680812
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '7d1c821a7e05'
down_revision: Union[str, None] = 'ec157808ef2a'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Upgrade schema: make users.mobile optional (allow NULL)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column('users', 'mobile', nullable=True)
    # ### end Alembic commands ###
def downgrade() -> None:
    """Downgrade schema: make users.mobile required again.

    NOTE(review): this will fail if any row holds a NULL mobile —
    backfill before downgrading.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column('users', 'mobile', nullable=False)
    # ### end Alembic commands ###

View File

@ -0,0 +1,32 @@
"""updated clinic enum status
Revision ID: 827c736d4aeb
Revises: 928001a9d80f
Create Date: 2025-05-12 15:36:42.117900
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '827c736d4aeb'
down_revision: Union[str, None] = '928001a9d80f'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Upgrade schema: rename enum value REQUESTED_DOCTOR -> REQUESTED_DOC."""
    # Update the enum type to replace 'requested_doctor' with 'requested_doc'
    op.execute("ALTER TYPE clinicstatus RENAME VALUE 'REQUESTED_DOCTOR' TO 'REQUESTED_DOC'")
    # ### end Alembic commands ###
def downgrade() -> None:
    """Downgrade schema: rename enum value REQUESTED_DOC -> REQUESTED_DOCTOR."""
    # Revert the enum type change back to 'requested_doctor' from 'requested_doc'
    op.execute("ALTER TYPE clinicstatus RENAME VALUE 'REQUESTED_DOC' TO 'REQUESTED_DOCTOR'")
    # ### end Alembic commands ###

View File

@ -0,0 +1,42 @@
"""updated_subscription_table
Revision ID: 8d19e726b997
Revises: 5ed8ac3d258c
Create Date: 2025-06-03 19:07:17.107577
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '8d19e726b997'
down_revision: Union[str, None] = '5ed8ac3d258c'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Upgrade schema.

    Adds pricing-breakdown columns to subscriptions and removes the
    session_id column and its index.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): monetary amounts are stored as String here — confirm
    # this matches how the application reads/computes them.
    op.add_column('subscriptions', sa.Column('total', sa.String(), nullable=True))
    op.add_column('subscriptions', sa.Column('setup_fee', sa.String(), nullable=True))
    op.add_column('subscriptions', sa.Column('subscription_fee', sa.String(), nullable=True))
    op.add_column('subscriptions', sa.Column('per_call_charge', sa.String(), nullable=True))
    op.drop_index('ix_subscriptions_session_id', table_name='subscriptions')
    op.drop_column('subscriptions', 'session_id')
    # ### end Alembic commands ###
def downgrade() -> None:
    """Downgrade schema.

    Restores subscriptions.session_id (data dropped by the upgrade is not
    recoverable) and removes the pricing-breakdown columns.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('subscriptions', sa.Column('session_id', sa.VARCHAR(length=255), autoincrement=False, nullable=True))
    op.create_index('ix_subscriptions_session_id', 'subscriptions', ['session_id'], unique=False)
    op.drop_column('subscriptions', 'per_call_charge')
    op.drop_column('subscriptions', 'subscription_fee')
    op.drop_column('subscriptions', 'setup_fee')
    op.drop_column('subscriptions', 'total')
    # ### end Alembic commands ###

View File

@ -0,0 +1,110 @@
"""updated clinic
Revision ID: 928001a9d80f
Revises:
Create Date: 2025-05-12 14:19:15.351582
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '928001a9d80f'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Upgrade schema (initial big revision).

    Adds soft-delete timestamps to several tables, a large set of clinic
    profile / AI-receptionist columns, users.profile_pic, and creates two
    enum types: integration and clinicstatus.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Soft-delete timestamps.
    op.add_column('appointments', sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True))
    op.add_column('calenders', sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True))
    op.add_column('clinics', sa.Column('emergency_phone', sa.String(), nullable=True))
    op.add_column('clinics', sa.Column('fax', sa.String(), nullable=True))
    # Create the integration enum type first
    integration_type = sa.Enum('BP', 'MEDICAL_DIRECTOR', name='integration')
    integration_type.create(op.get_bind())
    op.add_column('clinics', sa.Column('integration', integration_type, nullable=True))
    op.add_column('clinics', sa.Column('pms_id', sa.String(), nullable=True))
    op.add_column('clinics', sa.Column('practice_name', sa.String(), nullable=True))
    op.add_column('clinics', sa.Column('logo', sa.String(), nullable=True))
    op.add_column('clinics', sa.Column('country', sa.String(), nullable=True))
    op.add_column('clinics', sa.Column('postal_code', sa.String(), nullable=True))
    op.add_column('clinics', sa.Column('city', sa.String(), nullable=True))
    op.add_column('clinics', sa.Column('state', sa.String(), nullable=True))
    op.add_column('clinics', sa.Column('abn_doc', sa.String(), nullable=True))
    op.add_column('clinics', sa.Column('abn_number', sa.String(), nullable=True))
    op.add_column('clinics', sa.Column('contract_doc', sa.String(), nullable=True))
    op.add_column('clinics', sa.Column('clinic_phone', sa.String(), nullable=True))
    op.add_column('clinics', sa.Column('is_clinic_phone_enabled', sa.Boolean(), nullable=True))
    op.add_column('clinics', sa.Column('other_info', sa.String(), nullable=True))
    op.add_column('clinics', sa.Column('greeting_msg', sa.String(), nullable=True))
    op.add_column('clinics', sa.Column('voice_model', sa.String(), nullable=True))
    op.add_column('clinics', sa.Column('voice_model_provider', sa.String(), nullable=True))
    op.add_column('clinics', sa.Column('voice_model_gender', sa.String(), nullable=True))
    op.add_column('clinics', sa.Column('scenarios', sa.String(), nullable=True))
    op.add_column('clinics', sa.Column('general_info', sa.String(), nullable=True))
    # Create the clinicstatus enum type first
    clinic_status_type = sa.Enum('ACTIVE', 'INACTIVE', 'UNDER_REVIEW', 'REQUESTED_DOCTOR', 'REJECTED', 'PAYMENT_DUE', name='clinicstatus')
    clinic_status_type.create(op.get_bind())
    op.add_column('clinics', sa.Column('status', clinic_status_type, nullable=True))
    op.add_column('clinics', sa.Column('domain', sa.String(), nullable=True))
    op.add_column('clinics', sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True))
    # Unnamed unique constraint: PostgreSQL assigns the default name
    # clinics_emergency_phone_key.
    op.create_unique_constraint(None, 'clinics', ['emergency_phone'])
    op.add_column('doctors', sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True))
    op.add_column('patients', sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True))
    op.add_column('users', sa.Column('profile_pic', sa.String(), nullable=True))
    op.add_column('users', sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True))
    # ### end Alembic commands ###
def downgrade() -> None:
    """Downgrade schema.

    Drops every column added by this revision (soft-delete timestamps,
    clinic profile / AI-receptionist fields, users.profile_pic) and the
    two enum types (clinicstatus, integration) it created.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('users', 'deleted_at')
    op.drop_column('users', 'profile_pic')
    op.drop_column('patients', 'deleted_at')
    op.drop_column('doctors', 'deleted_at')
    # Alembic's drop_constraint() requires an explicit name (passing None
    # raises). The upgrade created this unique constraint unnamed, so
    # PostgreSQL assigned the default name <table>_<column>_key.
    op.drop_constraint('clinics_emergency_phone_key', 'clinics', type_='unique')
    op.drop_column('clinics', 'deleted_at')
    op.drop_column('clinics', 'domain')
    # Drop the status column first
    op.drop_column('clinics', 'status')
    # Then drop the enum type
    sa.Enum(name='clinicstatus').drop(op.get_bind())
    op.drop_column('clinics', 'general_info')
    op.drop_column('clinics', 'scenarios')
    op.drop_column('clinics', 'voice_model_gender')
    op.drop_column('clinics', 'voice_model_provider')
    op.drop_column('clinics', 'voice_model')
    op.drop_column('clinics', 'greeting_msg')
    op.drop_column('clinics', 'other_info')
    op.drop_column('clinics', 'is_clinic_phone_enabled')
    op.drop_column('clinics', 'clinic_phone')
    op.drop_column('clinics', 'contract_doc')
    op.drop_column('clinics', 'abn_number')
    op.drop_column('clinics', 'abn_doc')
    op.drop_column('clinics', 'state')
    op.drop_column('clinics', 'city')
    op.drop_column('clinics', 'postal_code')
    op.drop_column('clinics', 'country')
    op.drop_column('clinics', 'logo')
    op.drop_column('clinics', 'practice_name')
    op.drop_column('clinics', 'pms_id')
    # Drop the integration column first
    op.drop_column('clinics', 'integration')
    # Then drop the enum type
    sa.Enum(name='integration').drop(op.get_bind())
    op.drop_column('clinics', 'fax')
    op.drop_column('clinics', 'emergency_phone')
    op.drop_column('calenders', 'deleted_at')
    op.drop_column('appointments', 'deleted_at')
    # ### end Alembic commands ###

View File

@ -0,0 +1,38 @@
"""relation update
Revision ID: a19fede0cdc6
Revises: 48785e34c37c
Create Date: 2025-05-26 18:37:53.781411
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'a19fede0cdc6'
down_revision: Union[str, None] = '48785e34c37c'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Upgrade schema: recreate the appointment_relations.clinic_doctor_id
    foreign key with ON DELETE CASCADE."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint('appointment_relations_clinic_doctor_id_fkey', 'appointment_relations', type_='foreignkey')
    # Recreated unnamed: the database assigns its default constraint name.
    op.create_foreign_key(None, 'appointment_relations', 'clinic_doctors', ['clinic_doctor_id'], ['id'], ondelete='CASCADE')
    # ### end Alembic commands ###
def downgrade() -> None:
    """Downgrade schema: restore the appointment_relations.clinic_doctor_id
    foreign key without ON DELETE CASCADE.

    The autogenerated alter_column on users.clinicRole was removed: this
    revision's upgrade does not touch that column, so reversing it here
    was asymmetric.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Alembic's drop_constraint() requires an explicit name (passing None
    # raises). The upgrade recreated the FK unnamed, so PostgreSQL assigned
    # the default name <table>_<column>_fkey.
    op.drop_constraint('appointment_relations_clinic_doctor_id_fkey', 'appointment_relations', type_='foreignkey')
    op.create_foreign_key('appointment_relations_clinic_doctor_id_fkey', 'appointment_relations', 'clinic_doctors', ['clinic_doctor_id'], ['id'])
    # ### end Alembic commands ###

View File

@ -0,0 +1,33 @@
"""appointment relation table
Revision ID: a3a9b7d17bdd
Revises: 827c736d4aeb
Create Date: 2025-05-13 11:09:24.512689
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'a3a9b7d17bdd'
down_revision: Union[str, None] = '827c736d4aeb'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Upgrade schema: link clinic_doctors to clinics via clinic_id."""
    op.add_column('clinic_doctors', sa.Column('clinic_id', sa.Integer(), nullable=True))
    # Unnamed FK: the database assigns its default constraint name.
    op.create_foreign_key(None, 'clinic_doctors', 'clinics', ['clinic_id'], ['id'])
    # ### end Alembic commands ###
def downgrade() -> None:
    """Downgrade schema: remove clinic_doctors.clinic_id and its FK."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Alembic's drop_constraint() requires an explicit name (passing None
    # raises). The upgrade created the FK unnamed, so PostgreSQL assigned
    # its default name <table>_<column>_fkey.
    op.drop_constraint('clinic_doctors_clinic_id_fkey', 'clinic_doctors', type_='foreignkey')
    op.drop_column('clinic_doctors', 'clinic_id')
    # ### end Alembic commands ###

View File

@ -0,0 +1,51 @@
"""notification table
Revision ID: ac71b9a4b040
Revises: 0ce7107c1910
Create Date: 2025-05-14 16:14:23.750891
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'ac71b9a4b040'
down_revision: Union[str, None] = '0ce7107c1910'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Upgrade schema: create the notifications table and its indexes.

    Both sender_id and receiver_id reference users.id; create/update
    timestamps default to now() on the database side.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('notifications',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('title', sa.String(), nullable=True),
        sa.Column('message', sa.String(), nullable=True),
        sa.Column('is_read', sa.Boolean(), nullable=True),
        sa.Column('sender_id', sa.Integer(), nullable=False),
        sa.Column('receiver_id', sa.Integer(), nullable=False),
        sa.Column('create_time', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.Column('update_time', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True),
        sa.ForeignKeyConstraint(['receiver_id'], ['users.id'], ),
        sa.ForeignKeyConstraint(['sender_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_notifications_id'), 'notifications', ['id'], unique=False)
    op.create_index(op.f('ix_notifications_receiver_id'), 'notifications', ['receiver_id'], unique=False)
    op.create_index(op.f('ix_notifications_sender_id'), 'notifications', ['sender_id'], unique=False)
    # ### end Alembic commands ###
def downgrade() -> None:
    """Downgrade schema: drop the notifications table and its indexes."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_notifications_sender_id'), table_name='notifications')
    op.drop_index(op.f('ix_notifications_receiver_id'), table_name='notifications')
    op.drop_index(op.f('ix_notifications_id'), table_name='notifications')
    op.drop_table('notifications')
    # ### end Alembic commands ###

View File

@ -0,0 +1,32 @@
"""user
Revision ID: ad47f4af583e
Revises: 402a9152a6fc
Create Date: 2025-05-15 16:40:24.114531
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'ad47f4af583e'
down_revision: Union[str, None] = '402a9152a6fc'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Upgrade schema: add the optional users.mobile column."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('users', sa.Column('mobile', sa.String(), nullable=True))
    # ### end Alembic commands ###
def downgrade() -> None:
    """Downgrade schema: drop the users.mobile column."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('users', 'mobile')
    # ### end Alembic commands ###

View File

@ -0,0 +1,38 @@
"""updated_stripeuser
Revision ID: e50edac1c8f0
Revises: 8d19e726b997
Create Date: 2025-06-05 18:22:38.502127
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'e50edac1c8f0'
down_revision: Union[str, None] = '8d19e726b997'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Upgrade schema.

    Re-points stripe_users from users to clinics: adds clinic_id with an
    FK to clinics.id, and drops the unique constraint and FK on user_id.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('stripe_users', sa.Column('clinic_id', sa.Integer(), nullable=True))
    op.drop_constraint('stripe_users_user_id_key', 'stripe_users', type_='unique')
    op.drop_constraint('stripe_users_user_id_fkey', 'stripe_users', type_='foreignkey')
    # Unnamed FK: the database assigns its default constraint name.
    op.create_foreign_key(None, 'stripe_users', 'clinics', ['clinic_id'], ['id'])
    # ### end Alembic commands ###
def downgrade() -> None:
    """Downgrade schema.

    Drops the clinic FK/column and restores the original unique FK from
    stripe_users.user_id to users.id.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Alembic's drop_constraint() requires an explicit name (passing None
    # raises). The upgrade created the clinic FK unnamed, so PostgreSQL
    # assigned the default name <table>_<column>_fkey.
    op.drop_constraint('stripe_users_clinic_id_fkey', 'stripe_users', type_='foreignkey')
    op.create_foreign_key('stripe_users_user_id_fkey', 'stripe_users', 'users', ['user_id'], ['id'])
    op.create_unique_constraint('stripe_users_user_id_key', 'stripe_users', ['user_id'])
    op.drop_column('stripe_users', 'clinic_id')
    # ### end Alembic commands ###

View File

@ -0,0 +1,33 @@
"""file-verification-table
Revision ID: ec157808ef2a
Revises: 497238c0338d
Create Date: 2025-05-19 17:16:52.137111
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'ec157808ef2a'
down_revision: Union[str, None] = '497238c0338d'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Upgrade schema.

    Intentionally a no-op: the autogenerated add_column for
    clinic_file_verifications.logo_is_verified was commented out
    (the column is managed elsewhere).
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # op.add_column('clinic_file_verifications', sa.Column('logo_is_verified', sa.Boolean(), nullable=True))
    pass
    # ### end Alembic commands ###
def downgrade() -> None:
    """Downgrade schema.

    The upgrade is a no-op (its add_column is commented out), so the
    downgrade must not drop clinic_file_verifications.logo_is_verified —
    this revision never created that column.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # op.drop_column('clinic_file_verifications', 'logo_is_verified')
    pass
    # ### end Alembic commands ###

View File

@ -0,0 +1,14 @@
from sqlalchemy import Column, ForeignKey, Integer
from database import Base
from .CustomBase import CustomBase
from sqlalchemy.orm import relationship
class AppointmentRelations(Base, CustomBase):
    """Join table linking a clinic doctor to an appointment type they offer."""

    __tablename__ = "appointment_relations"

    id = Column(Integer, primary_key=True, index=True)
    appointment_type_id = Column(Integer, ForeignKey("master_appointment_types.id"))
    # ON DELETE CASCADE: rows disappear when the clinic doctor is deleted.
    clinic_doctor_id = Column(Integer, ForeignKey("clinic_doctors.id", ondelete="CASCADE"))
    clinicDoctors = relationship("ClinicDoctors", back_populates="appointmentRelations")
    masterAppointmentTypes = relationship("MasterAppointmentTypes", back_populates="appointmentRelations")

20
models/Appointments.py Normal file
View File

@ -0,0 +1,20 @@
from sqlalchemy import Column, DateTime, Enum, Integer, ForeignKey
from sqlalchemy.orm import relationship
from enums.enums import AppointmentStatus
from database import Base
from .CustomBase import CustomBase
class Appointments(Base, CustomBase):
    """A booked appointment between a doctor and a patient."""

    __tablename__ = "appointments"

    id = Column(Integer, primary_key=True, index=True)
    # NOTE(review): timezone-naive DateTime — confirm all writers agree on
    # the timezone used.
    appointment_time = Column(DateTime)
    status = Column(Enum(AppointmentStatus))
    doctor_id = Column(Integer, ForeignKey("doctors.id"), index=True)
    doctor = relationship("Doctors", back_populates="appointments")
    patient_id = Column(Integer, ForeignKey("patients.id"), index=True)
    patient = relationship("Patients", back_populates="appointments")

13
models/BlockedEmail.py Normal file
View File

@ -0,0 +1,13 @@
from sqlalchemy import Column, Integer, String
from database import Base
from .CustomBase import CustomBase
class BlockedEmail(Base, CustomBase):
    """An email address barred from the platform, with reason and severity."""

    __tablename__ = "blocked_emails"

    id = Column(Integer, primary_key=True, index=True)
    email = Column(String(255), unique=True, index=True)
    reason = Column(String)
    severity = Column(String)  # free-form string; valid values not constrained here

16
models/Calendar.py Normal file
View File

@ -0,0 +1,16 @@
from sqlalchemy import Column, Integer, String, ForeignKey
from sqlalchemy.orm import relationship
from database import Base
from .CustomBase import CustomBase
class Calenders(Base, CustomBase):
    """Calendar entry belonging to a doctor.

    The class/table name keeps the project's existing "calenders" spelling.
    """

    __tablename__ = "calenders"

    id = Column(Integer, primary_key=True, index=True)
    doc_id = Column(Integer, ForeignKey("doctors.id"), nullable=False, index=True)
    # rrule = Column(String)
    # Presumably a serialized time/schedule spec — confirm format with callers.
    time = Column(String)
    doctor = relationship("Doctors", back_populates="calendars")

18
models/CallTranscripts.py Normal file
View File

@ -0,0 +1,18 @@
from sqlalchemy import Column, Integer, String, DateTime
from database import Base
from .CustomBase import CustomBase
class CallTranscripts(Base, CustomBase):
    """Record of a received phone call and the key to its stored transcript."""

    __tablename__ = "call_transcripts"

    id = Column(Integer, primary_key=True, index=True)
    patient_name = Column(String, nullable=True)
    patient_number = Column(String)
    call_duration = Column(String)
    call_received_time = Column(DateTime(timezone=True))
    # Identifier/key of the stored transcript (presumably an object-store
    # key — confirm with the writing code).
    transcript_key_id = Column(String)
    # Plain integer, deliberately not a ForeignKey("clinics.id"); NULL when
    # the call is not attributed to a clinic.
    clinic_id = Column(Integer, nullable=True, default=None)

23
models/ClinicDoctors.py Normal file
View File

@ -0,0 +1,23 @@
from sqlalchemy import Column, Enum, Integer, String, ForeignKey, Table
from database import Base
from enums.enums import ClinicDoctorType, ClinicDoctorStatus
from .CustomBase import CustomBase
from sqlalchemy.orm import relationship
class ClinicDoctors(Base, CustomBase):
    """Staff record (doctor or nurse) attached to a clinic."""

    __tablename__ = "clinic_doctors"

    id = Column(Integer, primary_key=True, index=True)
    name = Column(String)
    role = Column(Enum(ClinicDoctorType))
    status = Column(Enum(ClinicDoctorStatus))
    # delete-orphan + passive_deletes mirrors the ON DELETE CASCADE on
    # appointment_relations.clinic_doctor_id: removing a staff record
    # removes their appointment-type links.
    appointmentRelations = relationship(
        "AppointmentRelations",
        back_populates="clinicDoctors",
        cascade="all, delete-orphan",
        passive_deletes=True,
    )
    clinic_id = Column(Integer, ForeignKey("clinics.id"))
    clinic = relationship("Clinics", back_populates="clinicDoctors")

View File

@ -0,0 +1,18 @@
from database import Base
from sqlalchemy import Column, Integer, Boolean, ForeignKey, String
from .CustomBase import CustomBase
from sqlalchemy.orm import relationship
class ClinicFileVerifications(Base, CustomBase):
    """Verification state of a clinic's uploaded documents.

    Each ``*_is_verified`` flag is a nullable Boolean (presumably None
    until reviewed, then True/False — confirm with reviewer workflow).
    """

    __tablename__ = "clinic_file_verifications"

    id = Column(Integer, primary_key=True, index=True)
    clinic_id = Column(Integer, ForeignKey("clinics.id"), nullable=False)
    logo_is_verified = Column(Boolean, default=None, nullable=True)
    abn_doc_is_verified = Column(Boolean, default=None, nullable=True)
    contract_doc_is_verified = Column(Boolean, default=None, nullable=True)
    # User who last changed any verification flag.
    last_changed_by = Column(Integer, ForeignKey("users.id"), nullable=False)
    rejection_reason = Column(String(255), nullable=True)
    clinic = relationship("Clinics", back_populates="clinic_file_verifications")
    last_changed_by_user = relationship("Users", back_populates="clinic_file_verifications")

15
models/ClinicOffers.py Normal file
View File

@ -0,0 +1,15 @@
from sqlalchemy import Column, Integer, Boolean, DateTime, ForeignKey, String
from database import Base
from .CustomBase import CustomBase
from sqlalchemy.orm import relationship
from datetime import datetime
class ClinicOffers(Base,CustomBase):
    """Promotional pricing offer keyed by a clinic's email address."""

    __tablename__ = "clinic_offers"

    id = Column(Integer, primary_key=True, index=True)
    # Plain String, no FK to clinics — matched by email.
    clinic_email = Column(String)
    setup_fees_waived = Column(Boolean, default=False)
    special_offer_for_month = Column(String, nullable=True) # free till specified month

51
models/Clinics.py Normal file
View File

@ -0,0 +1,51 @@
from sqlalchemy import Column, Integer, String, Boolean, ForeignKey
from sqlalchemy.orm import relationship
from database import Base
from enums.enums import Integration, ClinicStatus
from sqlalchemy import Enum
from .CustomBase import CustomBase
class Clinics(Base, CustomBase):
    """A clinic/practice, its profile and AI-receptionist configuration."""

    __tablename__ = "clinics"

    id = Column(Integer, primary_key=True, index=True)
    # --- Contact details ---
    name = Column(String)
    address = Column(String, nullable=True)
    phone = Column(String, unique=True, index=True) # clinic phone
    emergency_phone = Column(String, unique=True, nullable=True)
    email = Column(String, unique=True, index=True, nullable=True)
    fax = Column(String, nullable=True)
    # --- Practice-management-system integration ---
    integration = Column(Enum(Integration))
    pms_id = Column(String, nullable=True)
    practice_name = Column(String, nullable=True)
    logo = Column(String, nullable=True)
    country = Column(String, nullable=True)
    postal_code = Column(String, nullable=True)
    city = Column(String, nullable=True)
    state = Column(String, nullable=True)
    # --- Verification documents (strings; presumably file paths/keys —
    # confirm with upload code) ---
    abn_doc = Column(String, nullable=True)
    abn_number = Column(String, nullable=True)
    contract_doc = Column(String, nullable=True)
    clinic_phone = Column(String, nullable=True) # AI Receptionist Phone
    is_clinic_phone_enabled = Column(Boolean, default=False)
    # --- AI receptionist configuration ---
    other_info = Column(String, nullable=True)
    greeting_msg = Column(String, nullable=True)
    voice_model = Column(String, nullable=True)
    voice_model_provider = Column(String, nullable=True)
    voice_model_gender = Column(String, nullable=True)
    scenarios = Column(String, nullable=True)
    general_info = Column(String, nullable=True)
    status = Column(Enum(ClinicStatus))
    domain = Column(String, nullable=True) # unique for each clinic
    creator_id = Column(Integer, ForeignKey("users.id"), nullable=True) # Reference to the user who created this clinic
    # Relationships
    doctors = relationship("Doctors", back_populates="clinic")
    clinicDoctors = relationship("ClinicDoctors", back_populates="clinic")
    creator = relationship("Users", back_populates="created_clinics")
    clinic_file_verifications = relationship("ClinicFileVerifications", back_populates="clinic")
    # Stripe relationships
    stripe_user = relationship("StripeUsers", back_populates="clinic")

47
models/CustomBase.py Normal file
View File

@ -0,0 +1,47 @@
from datetime import datetime, timezone

from sqlalchemy import Column, DateTime, event, func, inspect
from sqlalchemy.orm import Query

from database import SessionLocal
class CustomBase:
    """Mixin adding audit timestamps and soft-delete support to models.

    Attributes:
        create_time: row creation timestamp, set by the database (now()).
        update_time: last-update timestamp, refreshed by the database on update.
        deleted_at: non-NULL marks the row as soft-deleted.
    """

    create_time = Column(DateTime(timezone=True), server_default=func.now())
    update_time = Column(
        DateTime(timezone=True), server_default=func.now(), onupdate=func.now()
    )
    deleted_at = Column(DateTime(timezone=True), nullable=True)

    def soft_delete(self, session):
        """Mark record as deleted without removing from database."""
        # The column is timezone-aware; store an aware UTC timestamp instead
        # of a naive local datetime.now() to avoid mixing naive/aware values.
        self.deleted_at = datetime.now(timezone.utc)
        session.add(self)
        session.commit()

    def restore(self, session):
        """Restore a soft-deleted record."""
        self.deleted_at = None
        session.add(self)
        session.commit()
# Global filter for deleted records: every SELECT issued through
# SessionLocal transparently excludes soft-deleted rows unless the query
# sets the execution option include_deleted=True.
@event.listens_for(SessionLocal, "do_orm_execute")
def _add_filtering_criteria(execute_state):
    """Append ``deleted_at IS NULL`` to SELECTs over CustomBase entities.

    Skipped for non-SELECT statements and for queries executed with the
    ``include_deleted`` execution option set to a truthy value.
    """
    if (
        execute_state.is_select
        and not execute_state.execution_options.get("include_deleted", False)
    ):
        # Check if any of the entities inherit from CustomBase
        for entity in execute_state.statement.column_descriptions:
            entity_class = entity.get("entity", None)
            if entity_class and issubclass(entity_class, CustomBase):
                # Add filter condition to exclude soft-deleted records
                execute_state.statement = execute_state.statement.filter(
                    entity_class.deleted_at.is_(None)
                )
                # NOTE(review): only the FIRST CustomBase entity in the
                # statement gets the filter; multi-entity queries may still
                # return soft-deleted rows for the others — confirm intent.
                break
# Option to include deleted records
class IncludeDeleted:
    """Marker class whose ``name`` spells the ``include_deleted`` execution
    option used to bypass the global soft-delete filter."""

    name = 'include_deleted'

22
models/Doctors.py Normal file
View File

@ -0,0 +1,22 @@
from sqlalchemy import Column, Integer, String, ForeignKey
from sqlalchemy.orm import relationship
from database import Base
from .CustomBase import CustomBase
class Doctors(Base, CustomBase):
    """A doctor registered under a clinic, with appointments and calendars."""

    __tablename__ = "doctors"

    id = Column(Integer, primary_key=True, index=True)
    name = Column(String)
    age = Column(Integer, nullable=True)
    email = Column(String, unique=True, index=True, nullable=True)
    phone = Column(String, unique=True, index=True)
    address = Column(String, nullable=True)
    clinic_id = Column(Integer, ForeignKey("clinics.id"), nullable=False, index=True)
    clinic = relationship("Clinics", back_populates="doctors")
    appointments = relationship("Appointments", back_populates="doctor")
    calendars = relationship("Calenders", back_populates="doctor")

15
models/Fcm.py Normal file
View File

@ -0,0 +1,15 @@
from sqlalchemy import Column, Integer, String
from sqlalchemy.orm import relationship
from sqlalchemy import ForeignKey
from database import Base
from .CustomBase import CustomBase
class Fcm(Base, CustomBase):
    """Push-notification device token registered for a user (table ``fcm``)."""

    __tablename__ = "fcm"

    id = Column(Integer, primary_key=True, index=True)
    token = Column(String)
    user_id = Column(Integer, ForeignKey("users.id"), nullable=False, index=True)
    user = relationship("Users", back_populates="fcm")

View File

@ -0,0 +1,12 @@
from sqlalchemy import Column, Integer, String
from database import Base
from .CustomBase import CustomBase
from sqlalchemy.orm import relationship
class MasterAppointmentTypes(Base, CustomBase):
    """Lookup table of appointment-type names, linked via AppointmentRelations."""
    __tablename__ = "master_appointment_types"
    id = Column(Integer, primary_key=True, index=True)
    type = Column(String)
    appointmentRelations = relationship("AppointmentRelations", back_populates="masterAppointmentTypes")

19
models/Notifications.py Normal file
View File

@ -0,0 +1,19 @@
from sqlalchemy import Boolean, Column, Integer, String, ForeignKey
from sqlalchemy.orm import relationship
from database import Base
from .CustomBase import CustomBase
class Notifications(Base, CustomBase):
    """In-app notification sent from one user to another."""
    __tablename__ = "notifications"
    id = Column(Integer, primary_key=True, index=True)
    title = Column(String)
    message = Column(String)
    is_read = Column(Boolean, default=False)
    # Two FKs to the same table, so each relationship must name its own
    # foreign_keys explicitly to disambiguate the join.
    sender_id = Column(Integer, ForeignKey("users.id"), nullable=False, index=True)
    sender = relationship("Users", foreign_keys=[sender_id], back_populates="sent_notifications")
    receiver_id = Column(Integer, ForeignKey("users.id"), nullable=False, index=True)
    receiver = relationship("Users", foreign_keys=[receiver_id], back_populates="received_notifications")

11
models/OTP.py Normal file
View File

@ -0,0 +1,11 @@
from sqlalchemy import Column, Integer, String, DateTime
from database import Base
from .CustomBase import CustomBase
class OTP(Base, CustomBase):
    """One-time password issued to an email address, valid until ``expireAt``."""
    __tablename__ = "otp"
    id = Column(Integer, primary_key=True, index=True)
    email = Column(String(255), nullable=False)
    otp = Column(String(6), nullable=False)
    expireAt = Column(DateTime, nullable=False)

19
models/Patients.py Normal file
View File

@ -0,0 +1,19 @@
from sqlalchemy import Column, Integer, String
from sqlalchemy.orm import relationship
from database import Base
from .CustomBase import CustomBase
class Patients(Base, CustomBase):
    """ORM model for the ``patients`` table."""
    __tablename__ = "patients"
    id = Column(Integer, primary_key=True, index=True)
    name = Column(String)
    age = Column(Integer, nullable=True)
    email = Column(String, unique=True, index=True, nullable=True)
    phone = Column(String, unique=True, index=True)
    address = Column(String, nullable=True)
    # Date of birth stored as a plain string, not a Date column.
    dob = Column(String, nullable=True)
    appointments = relationship("Appointments", back_populates="patient")

16
models/PaymentLogs.py Normal file
View File

@ -0,0 +1,16 @@
from database import Base
from models.CustomBase import CustomBase
from sqlalchemy import Column, Integer, String, ForeignKey, Numeric
from sqlalchemy.orm import relationship
class PaymentLogs(Base, CustomBase):
    """Audit record of a payment event (Stripe customer/account identifiers)."""
    __tablename__ = "payment_logs"
    id = Column(Integer, primary_key=True, index=True)
    customer_id = Column(String)
    account_id = Column(String)
    amount = Column(Numeric(10, 2))
    # NOTE(review): clinic_id carries no ForeignKey constraint here — confirm
    # whether that is intentional (e.g. logs must survive clinic deletion).
    clinic_id = Column(Integer)
    unique_clinic_id = Column(String)
    payment_status = Column(String)
    metadata_logs = Column(String)

12
models/PaymentSessions.py Normal file
View File

@ -0,0 +1,12 @@
from sqlalchemy import Column, Integer, String
from database import Base
from .CustomBase import CustomBase
class PaymentSessions(Base, CustomBase):
    """Checkout session record keyed by the processor's ``session_id``."""
    __tablename__ = "payment_sessions"
    id = Column(Integer, primary_key=True, index=True)
    session_id = Column(String(255), unique=True, index=True)
    customer_id = Column(String, nullable=False)
    clinic_id = Column(Integer, nullable=False)
    status = Column(String, nullable=False)

View File

@ -0,0 +1,11 @@
from sqlalchemy import Column, Integer, String
from database import Base
from .CustomBase import CustomBase
class ResetPasswordTokens(Base, CustomBase):
    """Single-use token emailed to a user for password reset."""
    __tablename__ = "reset_password_tokens"
    id = Column(Integer, primary_key=True)
    email = Column(String)
    token = Column(String)

View File

@ -0,0 +1,11 @@
from sqlalchemy import Column, Integer, Numeric
from database import Base
from .CustomBase import CustomBase
class SignupPricingMaster(Base, CustomBase):
    """Master pricing configuration applied at clinic signup."""
    __tablename__ = "signup_pricing_master"
    id = Column(Integer, primary_key=True, index=True)
    setup_fees = Column(Numeric(precision=10, scale=2))
    subscription_fees = Column(Numeric(precision=10, scale=2))
    per_call_charges = Column(Numeric(precision=10, scale=2))

15
models/StripeUsers.py Normal file
View File

@ -0,0 +1,15 @@
from database import Base
from models.CustomBase import CustomBase
from sqlalchemy import Column, Integer, String, ForeignKey
from sqlalchemy.orm import relationship
class StripeUsers(Base, CustomBase):
    """Mapping of a clinic/user to its Stripe customer and connected account."""
    __tablename__ = "stripe_users"
    id = Column(Integer, primary_key=True, index=True)
    # NOTE(review): user_id has no ForeignKey constraint, unlike clinic_id —
    # confirm whether it should reference users.id.
    user_id = Column(Integer, nullable=True)
    clinic_id = Column(Integer, ForeignKey('clinics.id'), nullable=True)
    customer_id = Column(String)
    account_id = Column(String)
    clinic = relationship("Clinics", back_populates="stripe_user")

20
models/Subscriptions.py Normal file
View File

@ -0,0 +1,20 @@
from sqlalchemy import Column, Integer, String
from database import Base
from .CustomBase import CustomBase
class Subscriptions(Base, CustomBase):
    """Stripe subscription snapshot for a clinic.

    Monetary amounts and billing-period bounds are stored as strings
    (the period fields hold unix timestamps, per the inline comments).
    """
    __tablename__ = "subscriptions"
    id = Column(Integer, primary_key=True, index=True)
    # session_id = Column(String(255), index=True)
    customer_id = Column(String,index=True)
    account_id = Column(String,index=True)
    total = Column(String)
    setup_fee = Column(String)
    subscription_fee = Column(String)
    per_call_charge = Column(String)
    subscription_id = Column(String,index=True)
    clinic_id = Column(Integer, index=True)
    status = Column(String)
    current_period_start = Column(String) # unix timestamp
    current_period_end = Column(String) # unix timestamp
    metadata_logs = Column(String)

30
models/Users.py Normal file
View File

@ -0,0 +1,30 @@
from sqlalchemy import Column, Integer, String
from database import Base
from sqlalchemy import Enum
from enums.enums import ClinicUserRoles, UserType
from models.CustomBase import CustomBase
from sqlalchemy.orm import relationship
class Users(Base, CustomBase):
    """Application account: super-admins and clinic users share this table,
    distinguished by ``userType`` / ``clinicRole``."""
    __tablename__ = "users"
    id = Column(Integer, primary_key=True, index=True)
    username = Column(String, index=True)
    email = Column(String, unique=True, index=True)
    # Hashed password (hashing happens in the service layer).
    password = Column(String)
    clinicRole = Column(Enum(ClinicUserRoles), nullable=True)
    userType = Column(Enum(UserType), nullable=True)
    profile_pic = Column(String, nullable=True)
    mobile = Column(String, nullable=True)
    # Notification relationships — both FKs live on Notifications, so each
    # side names its foreign_keys explicitly.
    sent_notifications = relationship("Notifications", foreign_keys="Notifications.sender_id", back_populates="sender")
    received_notifications = relationship("Notifications", foreign_keys="Notifications.receiver_id", back_populates="receiver")
    # FCM relationships
    fcm = relationship("Fcm", back_populates="user")
    # Clinics created by this user
    created_clinics = relationship("Clinics", back_populates="creator")
    clinic_file_verifications = relationship("ClinicFileVerifications", back_populates="last_changed_by_user")
    # No longer have Stripe relationships

47
models/__init__.py Normal file
View File

@ -0,0 +1,47 @@
# Package aggregator: importing every model here ensures all mappers are
# registered with SQLAlchemy before any relationship is resolved.
from .Users import Users
from .Clinics import Clinics
from .Doctors import Doctors
from .Patients import Patients
from .Appointments import Appointments
from .Calendar import Calenders
from .AppointmentRelations import AppointmentRelations
from .MasterAppointmentTypes import MasterAppointmentTypes
from .ClinicDoctors import ClinicDoctors
from .Notifications import Notifications
from .CallTranscripts import CallTranscripts
from .Fcm import Fcm
from .BlockedEmail import BlockedEmail
from .SignupPricingMaster import SignupPricingMaster
from .ClinicFileVerifications import ClinicFileVerifications
from .OTP import OTP
from .ResetPasswordTokens import ResetPasswordTokens
from .ClinicOffers import ClinicOffers
from .StripeUsers import StripeUsers
from .PaymentLogs import PaymentLogs
from .PaymentSessions import PaymentSessions
from .Subscriptions import Subscriptions
# Public API of the models package.
__all__ = [
    "Users",
    "Clinics",
    "Doctors",
    "Patients",
    "Appointments",
    "Calenders",
    "AppointmentRelations",
    "MasterAppointmentTypes",
    "ClinicDoctors",
    "Notifications",
    "CallTranscripts",
    "Fcm",
    "BlockedEmail",
    "SignupPricingMaster",
    "ClinicFileVerifications",
    "OTP",
    "ResetPasswordTokens",
    "ClinicOffers",
    "StripeUsers",
    "PaymentLogs",
    "PaymentSessions",
    "Subscriptions"
]

8124
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@ -1,58 +0,0 @@
{
"name": "health-apps-admin",
"private": true,
"version": "0.0.0",
"type": "module",
"scripts": {
"dev": "vite",
"build": "vite build",
"lint": "eslint .",
"preview": "vite preview"
},
"dependencies": {
"@emotion/react": "^11.10.5",
"@emotion/styled": "^11.10.5",
"@mui/icons-material": "^5.11.0",
"@mui/lab": "^5.0.0-alpha.117",
"@mui/material": "^5.11.7",
"@mui/styles": "^5.11.7",
"@mui/system": "^5.11.7",
"@mui/x-date-pickers": "^8.2.0",
"@mui/x-date-pickers-pro": "^8.4.0",
"axios": "^1.8.4",
"date-fns": "^4.1.0",
"firebase": "^11.6.0",
"formik": "^2.4.6",
"i": "^0.3.7",
"jwt-decode": "^4.0.0",
"lodash": "^4.17.21",
"material-react-table": "^3.2.1",
"npm": "^11.3.0",
"react": "^18.2.0",
"react-dom": "^18.2.0",
"react-dropzone": "^14.3.8",
"react-image-crop": "^11.0.10",
"react-infinite-scroll-component": "^6.1.0",
"react-phone-input-2": "^2.15.1",
"react-redux": "^8.1.3",
"react-router-dom": "^6.20.0",
"react-toastify": "^9.1.3",
"react-zoom-pan-pinch": "^3.7.0",
"redux": "^5.0.1",
"redux-localstorage": "^0.4.1",
"redux-promise-middleware": "^6.2.0",
"redux-thunk": "^3.1.0",
"yup": "^1.6.1"
},
"devDependencies": {
"@eslint/js": "^9.22.0",
"@types/react": "^18.2.38",
"@types/react-dom": "^18.2.15",
"@vitejs/plugin-react-swc": "^3.8.0",
"eslint": "^9.22.0",
"eslint-plugin-react-hooks": "^5.2.0",
"eslint-plugin-react-refresh": "^0.4.19",
"globals": "^16.0.0",
"vite": "^6.3.1"
}
}

View File

@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" class="iconify iconify--logos" width="31.88" height="32" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 257"><defs><linearGradient id="IconifyId1813088fe1fbc01fb466" x1="-.828%" x2="57.636%" y1="7.652%" y2="78.411%"><stop offset="0%" stop-color="#41D1FF"></stop><stop offset="100%" stop-color="#BD34FE"></stop></linearGradient><linearGradient id="IconifyId1813088fe1fbc01fb467" x1="43.376%" x2="50.316%" y1="2.242%" y2="89.03%"><stop offset="0%" stop-color="#FFEA83"></stop><stop offset="8.333%" stop-color="#FFDD35"></stop><stop offset="100%" stop-color="#FFA800"></stop></linearGradient></defs><path fill="url(#IconifyId1813088fe1fbc01fb466)" d="M255.153 37.938L134.897 252.976c-2.483 4.44-8.862 4.466-11.382.048L.875 37.958c-2.746-4.814 1.371-10.646 6.827-9.67l120.385 21.517a6.537 6.537 0 0 0 2.322-.004l117.867-21.483c5.438-.991 9.574 4.796 6.877 9.62Z"></path><path fill="url(#IconifyId1813088fe1fbc01fb467)" d="M185.432.063L96.44 17.501a3.268 3.268 0 0 0-2.634 3.014l-5.474 92.456a3.268 3.268 0 0 0 3.997 3.378l24.777-5.718c2.318-.535 4.413 1.507 3.936 3.838l-7.361 36.047c-.495 2.426 1.782 4.5 4.151 3.78l15.304-4.649c2.372-.72 4.652 1.36 4.15 3.788l-11.698 56.621c-.732 3.542 3.979 5.473 5.943 2.437l1.313-2.028l72.516-144.72c1.215-2.423-.88-5.186-3.54-4.672l-25.505 4.922c-2.396.462-4.435-1.77-3.759-4.114l16.646-57.705c.677-2.35-1.37-4.583-3.769-4.113Z"></path></svg>

Before

Width:  |  Height:  |  Size: 1.5 KiB

BIN
requirements.txt Normal file

Binary file not shown.

25
schemas/ApiResponse.py Normal file
View File

@ -0,0 +1,25 @@
from typing import Any, Optional, TypeVar, Generic
from pydantic import Field
from pydantic import BaseModel
from exceptions import ApiException
T = TypeVar('T')
class ApiResponse(BaseModel, Generic[T]):
    """Standard API response model matching Node.js implementation."""
    data: Optional[T] = Field(default=None, description="Response data")
    error: Optional[Any] = Field(default=None, description="Error details")
    message: Optional[str] = Field(default=None, description="Response message")
    @classmethod
    def from_api_exception(cls, exception: ApiException) -> dict:
        """Create an API response from an API exception."""
        import traceback
        # NOTE(review): traceback.format_exc() only yields a useful trace when
        # invoked while an exception is being handled; outside an ``except``
        # block it returns "NoneType: None". Confirm callers invoke this from
        # within an exception handler.
        return cls(
            data=None,
            message=exception.message,
            error=traceback.format_exc() if exception else None
        ).model_dump(exclude_none=True)

146
schemas/BaseSchemas.py Normal file
View File

@ -0,0 +1,146 @@
# schemas.py
from datetime import datetime
from typing import List, Optional
from pydantic import BaseModel, EmailStr
from enums.enums import AppointmentStatus, ClinicDoctorStatus, ClinicDoctorType, ClinicUserRoles, UserType, Integration
# AWS SNS envelope fields used by the bounce-handling webhook.
class SNSBase(BaseModel):
    Type: str
    MessageId: str
    Token: str
    TopicArn: str
    SubscribeURL: str
    Message: str
# Email + one-time-password pair for OTP verification.
class AuthOTP(BaseModel):
    email: EmailStr
    otp: str
# Per-document verification flags for a clinic's uploaded files.
class ClinicFileVerificationBase(BaseModel):
    abn_doc_is_verified: Optional[bool] = None
    contract_doc_is_verified: Optional[bool] = None
    logo_is_verified: Optional[bool] = None
    last_changed_by: Optional[int] = None
# Signup pricing configuration (all amounts optional for partial payloads).
class SignupPricingMasterBase(BaseModel):
    setup_fees: Optional[float] = None
    subscription_fees: Optional[float] = None
    per_call_charges: Optional[float] = None
# Plain email/password credentials for login.
class AuthBase(BaseModel):
    email: EmailStr
    password: str
# Token + new password for the reset-password flow.
class ResetPasswordBase(BaseModel):
    token: str
    password: str
# Base schemas (shared attributes for create/read operations)
class ClinicBase(BaseModel):
    name: str
    address: Optional[str] = None
    phone: str
    emergency_phone: Optional[str] = None
    email: Optional[EmailStr] = None
    integration: Integration
    pms_id: str
    practice_name: str
    logo: Optional[str] = None
    country: Optional[str] = None
    postal_code: Optional[str] = None
    city: Optional[str] = None
    state: Optional[str] = None
    abn_doc: Optional[str] = None
    abn_number: Optional[str] = None
    contract_doc: Optional[str] = None
    clinic_phone: Optional[str] = None
    is_clinic_phone_enabled: Optional[bool] = None
    other_info: Optional[str] = None
    greeting_msg: Optional[str] = None
    voice_model: Optional[str] = None
    voice_model_provider: Optional[str] = None
    voice_model_gender: Optional[str] = None
    scenarios: Optional[str] = None
    general_info: Optional[str] = None
    creator_id: Optional[int] = None
    fax: Optional[str] = None
class DoctorBase(BaseModel):
    name: str
    age: Optional[int] = None
    email: Optional[EmailStr] = None
    phone: str
    address: Optional[str] = None
    clinic_id: int
class PatientBase(BaseModel):
    name: str
    age: Optional[int] = None
    email: Optional[EmailStr] = None
    phone: Optional[str] = None
    address: Optional[str] = None
    dob: Optional[str] = None
class AppointmentBase(BaseModel):
    doctor_id: int
    patient_id: int
    appointment_time: datetime
    # New appointments default to CONFIRMED.
    status: AppointmentStatus = AppointmentStatus.CONFIRMED
class CalendarBase(BaseModel):
    doc_id: int
    # rrule: str # Recurrence rule in iCalendar format
    time: str
class MasterAppointmentTypeBase(BaseModel):
    type: str
class UserBase(BaseModel):
    username: str
    email: EmailStr
    password: str
    clinicRole: Optional[ClinicUserRoles] = None
    userType: Optional[UserType] = None
    mobile: Optional[str] = None
class ClinicDoctorBase(BaseModel):
    name: str
    role: ClinicDoctorType
    status: ClinicDoctorStatus
# Call-transcript metadata; the transcript body lives behind transcript_key_id.
class CallTranscriptsBase(BaseModel):
    patient_name:Optional[str] = None
    patient_number:str
    call_duration:str
    call_received_time:datetime
    transcript_key_id:str
class NotificationBase(BaseModel):
    title: str
    message: str
    is_read: bool
    sender_id: int
    receiver_id: int
class ClinicOffersBase(BaseModel):
    clinic_email: str
    setup_fees_waived: bool
    special_offer_for_month: str
class StripeUserBase(BaseModel):
    account_id: str
    customer_id: str
    user_id: int

78
schemas/CreateSchemas.py Normal file
View File

@ -0,0 +1,78 @@
from .BaseSchemas import *
from datetime import datetime
from typing import Optional
from enums.enums import AppointmentStatus
# Create schemas (used for creating new records)
class ClinicCreate(ClinicBase):
    pass
# Super-admin accounts are created without a password here; the service
# layer generates and emails one.
class CreateSuperAdmin(BaseModel):
    username:str
    email:EmailStr
class UpdateSuperAdmin(BaseModel):
    username:str
class DoctorCreate(DoctorBase):
    pass
class PatientCreate(PatientBase):
    pass
class AppointmentCreate(AppointmentBase):
    pass
class CalendarCreate(CalendarBase):
    pass
class SignupPricingMasterCreate(SignupPricingMasterBase):
    pass
class MasterAppointmentTypeCreate(MasterAppointmentTypeBase):
    pass
# Variant used when the caller identifies doctor/patient by name, not id.
class AppointmentCreateWithNames(BaseModel):
    doctor_name: str
    patient_name: str
    appointment_time: datetime
    status: AppointmentStatus = AppointmentStatus.CONFIRMED
# Composite signup payload: a user plus the clinic they register.
class UserCreate(BaseModel):
    # User data sent from frontend
    user: UserBase
    # Clinic data sent from frontend
    clinic: ClinicBase
# appointmentTypes carries MasterAppointmentTypes ids to associate.
class ClinicDoctorCreate(BaseModel):
    name: str
    role: ClinicDoctorType
    appointmentTypes: list[int]
class CallTranscriptsCreate(CallTranscriptsBase):
    pass
class NotificationCreate(NotificationBase):
    pass
# Request body for generating an S3 upload target.
class S3Create(BaseModel):
    folder: str
    file_name: str
    clinic_id: Optional[str] = None
class ClinicOfferCreate(ClinicOffersBase):
    pass

208
schemas/ResponseSchemas.py Normal file
View File

@ -0,0 +1,208 @@
from datetime import datetime
from typing import Any, List, Optional
from enums.enums import ClinicStatus
from .BaseSchemas import *
from pydantic import Field
# Response schemas (used for API responses)
class Clinic(ClinicBase):
    id: int
    create_time: datetime
    update_time: datetime
    status: ClinicStatus
    class Config:
        from_attributes = True
# NOTE(review): this class is redefined further below with an extra
# appointmentTypes field; at import time the later definition wins and this
# one is shadowed — consider deleting one of them.
class ClinicDoctorResponse(ClinicDoctorBase):
    id: int
    create_time: datetime
    update_time: datetime
    class Config:
        from_attributes = True
class SignupPricingMasterResponse(SignupPricingMasterBase):
    id: int
    create_time: datetime
    update_time: datetime
    class Config:
        from_attributes = True
class UserResponse(UserBase):
    id: int
    create_time: datetime
    update_time: datetime
    # Hash is accepted on input but excluded from serialized output.
    password: str = Field(exclude=True)
    created_clinics: Optional[List[Clinic]] = None
    class Config:
        from_attributes = True
        populate_by_name = True
class Doctor(DoctorBase):
    id: int
    create_time: datetime
    update_time: datetime
    class Config:
        from_attributes = True
class Patient(PatientBase):
    id: int
    create_time: datetime
    update_time: datetime
    class Config:
        from_attributes = True
class AppointmentSchema(AppointmentBase):
    id: int
    create_time: datetime
    update_time: datetime
    class Config:
        from_attributes = True
class Calendar(CalendarBase):
    id: int
    create_time: datetime
    update_time: datetime
    class Config:
        from_attributes = True
# custom schema for response
class CalendarTimeSchema(BaseModel):
    time: str
    class Config:
        from_attributes = True
class ClinicSchema(BaseModel):
    id: int
    name: str
    address: str
    phone: str
    email: str
    class Config:
        from_attributes = True
# Detailed response schemas with nested relationships
# (pydantic deep-copies mutable defaults like [] per instance, so these
# list defaults are safe).
class ClinicWithDoctors(Clinic):
    doctors: List[Doctor] = []
class DoctorWithAppointments(Doctor):
    appointments: List[AppointmentSchema] = []
    calendars: List[CalendarTimeSchema] = []
    clinic: ClinicSchema
class DoctorWithCalendar(Doctor):
    calendars: List[CalendarTimeSchema] = []
    clinic: ClinicSchema
class PatientWithAppointments(Patient):
    appointments: List[AppointmentSchema] = []
class AppointmentDetailed(AppointmentSchema):
    # Inner classes are scoped variants with all fields required,
    # distinct from the module-level Doctor/Patient schemas.
    class Doctor(BaseModel):
        id: int
        name: str
        age: int
        email: str
        phone: str
        address: str
        class Config:
            from_attributes = True
    class Patient(BaseModel):
        id: int
        name: str
        age: int
        email: str
        phone: str
        address: str
        dob: str
        class Config:
            from_attributes = True
    doctor: Doctor
    patient: Patient
class CallTranscriptsResponse(CallTranscriptsBase):
    id: int
    create_time: datetime
    update_time: datetime
    class Config:
        from_attributes = True
class NotificationResponse(NotificationBase):
    id: int
    create_time: datetime
    update_time: datetime
    class Config:
        from_attributes = True
class MasterAppointmentTypeResponse(MasterAppointmentTypeBase):
    id: int
    create_time: datetime
    update_time: datetime
    class Config:
        from_attributes = True
# Second definition: shadows the ClinicDoctorResponse declared earlier.
class ClinicDoctorResponse(ClinicDoctorBase):
    id: int
    create_time: datetime
    update_time: datetime
    appointmentTypes: Optional[List[MasterAppointmentTypeResponse]] = []
    class Config:
        from_attributes = True
        # NOTE(review): allow_population_by_field_name is the pydantic v1
        # spelling; UserResponse above uses v2's populate_by_name — confirm
        # which pydantic major version is targeted and unify.
        allow_population_by_field_name = True
class ClinicOfferResponse(ClinicOffersBase):
    id: int
    create_time: datetime
    update_time: datetime
    class Config:
        from_attributes = True
# NOTE(review): class name has a typo ("Reponse") but is part of the public
# interface, so it is kept as-is.
class StripeUserReponse(StripeUserBase):
    id: int
    create_time: datetime
    update_time: datetime
    class Config:
        from_attributes = True

82
schemas/UpdateSchemas.py Normal file
View File

@ -0,0 +1,82 @@
from typing import Literal
from .BaseSchemas import *
from enums.enums import ClinicStatus, Integration
# Update schemas (all fields optional for partial updates)
class ClinicUpdate(BaseModel):
    name: Optional[str] = None
    address: Optional[str] = None
    phone: Optional[str] = None
    integration: Optional[Integration] = None
    pms_id: Optional[str] = None
    practice_name: Optional[str] = None
    logo: Optional[str] = None
    country: Optional[str] = None
    postal_code: Optional[str] = None
    city: Optional[str] = None
    state: Optional[str] = None
    abn_doc: Optional[str] = None
    abn_number: Optional[str] = None
    contract_doc: Optional[str] = None
    clinic_phone: Optional[str] = None
    # NOTE(review): defaults to True here while every other field (and
    # ClinicBase) defaults to None — a partial update that omits this field
    # would be indistinguishable from explicitly enabling it; confirm intent.
    is_clinic_phone_enabled: Optional[bool] = True
    other_info: Optional[str] = None
    greeting_msg: Optional[str] = None
    voice_model: Optional[str] = None
    voice_model_provider: Optional[str] = None
    voice_model_gender: Optional[str] = None
    scenarios: Optional[str] = None
    general_info: Optional[str] = None
# Admin action payload for approving/rejecting a clinic.
class ClinicStatusUpdate(BaseModel):
    clinic_id: int
    status: ClinicStatus
    rejection_reason: Optional[str] = None
    documentStatus: Optional[dict] = None
class SignupPricingMasterUpdate(SignupPricingMasterBase):
    pass
class DoctorUpdate(BaseModel):
    name: Optional[str] = None
    age: Optional[int] = None
    email: Optional[EmailStr] = None
    phone: Optional[str] = None
    address: Optional[str] = None
    clinic_id: Optional[int] = None
class PatientUpdate(BaseModel):
    name: Optional[str] = None
    age: Optional[int] = None
    email: Optional[EmailStr] = None
    phone: Optional[str] = None
    address: Optional[str] = None
class AppointmentUpdate(BaseModel):
    doctor_id: Optional[int] = None
    patient_id: Optional[int] = None
    appointment_time: Optional[datetime] = None
    status: Optional[AppointmentStatus] = None
class CalendarUpdate(BaseModel):
    doc_id: Optional[int] = None
    # NOTE(review): rrule is commented out in CalendarBase yet updatable
    # here, and CalendarBase's ``time`` field is absent — confirm the
    # calendar schema pair is in sync.
    rrule: Optional[str] = None
class UserUpdate(BaseModel):
    username: Optional[str] = None
    clinicRole: Optional[ClinicUserRoles] = None
    userType: Optional[UserType] = None
    profile_pic: Optional[str] = None
    password: Optional[str] = None
class ClinicDoctorUpdate(BaseModel):
    name: Optional[str] = None
    role: Optional[ClinicDoctorType] = None
    status: Optional[ClinicDoctorStatus] = None
    appointmentTypes: Optional[list[int]] = None

0
schemas/__init__.py Normal file
View File

0
services/__init__.py Normal file
View File

15
services/agentServices.py Normal file
View File

@ -0,0 +1,15 @@
from services.clinicServices import ClinicServices
from services.clinicDoctorsServices import ClinicDoctorsServices
class AgentServices:
    """Facade exposing clinic lookups to the voice-agent layer by delegating
    to the clinic and clinic-doctor services."""
    def __init__(self):
        self.clinicServices = ClinicServices()
        self.clinicDoctorService = ClinicDoctorsServices()
    async def get_clinic_by_phone(self, phone: str):
        """Resolve a clinic record from its phone number."""
        return await self.clinicServices.get_clinic_by_phone(phone)
    async def get_clinic_doctors_with_appointments(self, clinic_id: int):
        """List a clinic's doctors (delegates to get_clinic_doctors; despite
        the name, no extra appointment data is fetched here)."""
        return await self.clinicDoctorService.get_clinic_doctors(clinic_id)

340
services/authService.py Normal file
View File

@ -0,0 +1,340 @@
from operator import or_
import os
import dotenv
dotenv.load_dotenv()
from interface.common_response import CommonResponse
from schemas.ResponseSchemas import UserResponse
import datetime
import json
import urllib.request
from services.jwtService import create_jwt_token
from services.userServices import UserServices
from models import BlockedEmail
from services.emailService import EmailService
from exceptions.validation_exception import ValidationException
from models import OTP
from enums.enums import UserType
from models import Users
from exceptions.resource_not_found_exception import ResourceNotFoundException
from models import ResetPasswordTokens
from utils.constants import generateOTP
from utils.password_utils import generate_reset_password_token, generate_secure_password, hash_password, verify_password
from schemas.CreateSchemas import CreateSuperAdmin, UpdateSuperAdmin, UserCreate
from schemas.BaseSchemas import AuthBase, AuthOTP
from exceptions.unauthorized_exception import UnauthorizedException
from database import get_db
from loguru import logger
class AuthService:
    """Authentication and super-admin management.

    Holds a single DB session obtained from ``get_db()``; each public method
    calls ``self.db.close()`` in a ``finally`` block (a SQLAlchemy Session is
    reusable after close, so the shared session survives across calls).
    """
    def __init__(self):
        self.user_service = UserServices()
        self.db = next(get_db())
        self.email_service = EmailService()
        self.url = os.getenv("FRONTEND_URL")
        self.logger = logger
    async def login(self, data: AuthBase) -> str:
        """Validate credentials and return a signed JWT for the user."""
        try:
            # get user
            user = await self.user_service.get_user_by_email(data.email)
            # verify password
            if not verify_password(data.password, user.password):
                raise UnauthorizedException("Invalid credentials")
            # Strip the hash and heavy relationship data before embedding
            # the user in the token payload.
            user_dict = user.model_dump(
                exclude={"password": True, "created_clinics": True},
                exclude_none=True,
                mode="json"
            )
            # create token
            token = create_jwt_token(user_dict)
            return token
        except Exception as e:
            self.logger.error(f"Error logging in: {e}")
            raise e
        finally:
            self.db.close()
    async def register(self, user_data: UserCreate, background_tasks=None):
        """Create a user (+clinic) and return an onboarding URL plus a JWT."""
        try:
            resp = await self.user_service.create_user(user_data, background_tasks)
            # Get the SQLAlchemy model instance
            user_obj = resp["user"]
            # create token with user data
            user_data = {
                "id": user_obj["id"],
                "username": user_obj["username"],
                "email": user_obj["email"],
                "clinicRole": user_obj["clinicRole"],
                "userType": user_obj["userType"],
                "mobile": user_obj["mobile"],
                "clinicId": user_obj["clinicId"]
            }
            token = create_jwt_token(user_data)
            # Update response with token
            resp["token"] = token
            response = {
                "url": resp.get("url"),
                "token": token
            }
            return response
        except Exception as e:
            self.logger.error(f"Error registering user: {e}")
            raise e
    def blockEmailSNS(self, body: str):
        """Handle an AWS SNS webhook: confirm (re)subscriptions and record
        bounced recipients in BlockedEmail.

        NOTE(review): ``body`` is accessed as a parsed dict despite the
        ``str`` annotation — confirm the route deserializes before calling.
        """
        try:
            # confirm subscription
            if body["Type"] == "SubscriptionConfirmation":
                urllib.request.urlopen(body["SubscribeURL"])
            # disable automatic unsubscribe confirmation by activating subscription again
            elif body["Type"] == "UnsubscribeConfirmation":
                urllib.request.urlopen(body["SubscribeURL"])
            # handle bounce notifications only
            elif body["Type"] == "Notification":
                msg = json.loads(body["Message"])
                # check if msg contains notificationType
                if "notificationType" not in msg:
                    return
                recepients = msg["bounce"]["bouncedRecipients"]
                for recipient in recepients:
                    blockEmail = BlockedEmail(email=recipient["emailAddress"], reason=msg["notificationType"], severity=msg["bounce"]["bounceType"])
                    self.db.add(blockEmail)
                self.db.commit()
            return "OK"
        except Exception as e:
            self.logger.error(f"Error blocking email: {e}")
            raise
        finally:
            self.db.close()
    async def send_otp(self, email:str):
        """Email a fresh OTP (10-minute expiry) unless the address is taken."""
        try:
            # check if email exists
            user = self.db.query(Users).filter(Users.email == email.lower()).first()
            if user:
                raise ValidationException("User with same email already exists")
            otp = generateOTP()
            self.email_service.send_otp_email(email, otp)
            # Create OTP record with proper datetime handling
            expire_time = datetime.datetime.now() + datetime.timedelta(minutes=10)
            otp_record = OTP(email=email, otp=otp, expireAt=expire_time)
            self.db.add(otp_record)
            self.db.commit()
            return
        except Exception as e:
            self.logger.error(f"Error sending OTP: {e}")
            raise
        finally:
            self.db.close()
    async def verify_otp(self, data: AuthOTP):
        """Check an OTP for the given email; valid codes are single-use."""
        try:
            db_otp = self.db.query(OTP).filter(OTP.email == data.email, OTP.otp == data.otp).first()
            if not db_otp:
                raise ValidationException("Invalid OTP")
            # Redundant with the query filter above, kept as a defensive check.
            if db_otp.otp != data.otp:
                raise ValidationException("Invalid OTP")
            if db_otp.expireAt < datetime.datetime.now():
                raise ValidationException("OTP expired")
            # OTP is valid, delete it to prevent reuse
            self.db.delete(db_otp)
            self.db.commit()
            return
        except Exception as e:
            self.logger.error(f"Error verifying OTP: {e}")
            raise
        finally:
            self.db.close()
    async def get_admins(self, user, limit:int, offset:int, search:str):
        """Paginated list of super-admins, optionally filtered by
        username/email substring; caller must be a super-admin.

        NOTE(review): ``or_`` comes from the ``operator`` module at file top;
        operator.or_(a, b) evaluates ``a | b``, which SQLAlchemy overloads,
        so the two-argument form works — confirm sqlalchemy.or_ wasn't meant.
        """
        try:
            if user["userType"] != UserType.SUPER_ADMIN:
                raise UnauthorizedException("User is not authorized to perform this action")
            admins = self.db.query(Users).filter(Users.userType == UserType.SUPER_ADMIN)
            total = self.db.query(Users).filter(Users.userType == UserType.SUPER_ADMIN).count()
            if search:
                admins = admins.filter(
                    or_(
                        Users.username.contains(search),
                        Users.email.contains(search),
                    )
                )
                # Recompute the total against the narrowed query.
                total = admins.count()
            admins = admins.limit(limit).offset(offset).all()
            response = [UserResponse(**admin.__dict__.copy()) for admin in admins]
            common_response = CommonResponse(data=response, total=total)
            return common_response
        except Exception as e:
            self.logger.error(f"Error getting admins: {e}")
            raise e
        finally:
            self.db.close()
    async def create_super_admin(self, user, data: CreateSuperAdmin):
        """Create a super-admin with a generated password and email them a
        login link; caller must be a super-admin."""
        try:
            if user["userType"] != UserType.SUPER_ADMIN:
                raise UnauthorizedException("User is not authorized to perform this action")
            # password = "admin@123"
            password = generate_secure_password()
            hashed_password = hash_password(password)
            # check if username and email are unique
            existing_user = (
                self.db.query(Users)
                .filter(
                    Users.email == data.email.lower(),
                )
                .first()
            )
            if existing_user:
                raise ValidationException("User with same email already exists")
            user = Users(
                username=data.username,
                email=data.email.lower(),
                password=hashed_password,
                userType=UserType.SUPER_ADMIN,
            )
            self.db.add(user)
            self.db.commit()
            LOGIN_URL = self.url
            # send email to user
            self.email_service.send_new_admin_email(data.username, data.email, password, LOGIN_URL)
            return
        except Exception as e:
            self.logger.error(f"Error creating super admin: {e}")
            raise e
        finally:
            self.db.close()
    async def update_super_admin(self, user, user_id: int, data: UpdateSuperAdmin):
        """Rename a super-admin account; caller must be a super-admin."""
        try:
            if user["userType"] != UserType.SUPER_ADMIN:
                raise UnauthorizedException("User is not authorized to perform this action")
            user = self.db.query(Users).filter(Users.id == user_id).first()
            if not user:
                raise ResourceNotFoundException("User not found")
            user.username = data.username
            self.db.add(user)
            self.db.commit()
            return
        except Exception as e:
            self.logger.error(f"Error updating super admin: {e}")
            raise e
        finally:
            self.db.close()
    async def delete_super_admin(self, user, user_id: int):
        """Soft-delete a super-admin account; caller must be a super-admin."""
        try:
            if user["userType"] != UserType.SUPER_ADMIN:
                raise UnauthorizedException("User is not authorized to perform this action")
            user = self.db.query(Users).filter(Users.id == user_id).first()
            if not user:
                raise ResourceNotFoundException("User not found")
            user.soft_delete(self.db)
            return
        except Exception as e:
            self.logger.error(f"Error deleting super admin: {e}")
            raise e
        finally:
            self.db.close()
    async def forget_password(self, email: str):
        """Issue a reset token for an existing account and email the link."""
        try:
            user = self.db.query(Users).filter(Users.email == email.lower()).first()
            if not user:
                raise ResourceNotFoundException("User not found")
            # get reset password token
            reset_password_token = generate_reset_password_token()
            reset_password = ResetPasswordTokens(email=email, token=reset_password_token)
            self.db.add(reset_password)
            self.db.commit()
            reset_password_url = f"{self.url}auth/reset-password?token={reset_password_token}"
            self.email_service.send_reset_password_email(email, reset_password_url)
            return
        except Exception as e:
            self.logger.error(f"Error forgetting password: {e}")
            raise e
        finally:
            self.db.close()
    async def reset_password(self, token: str, password: str):
        """Consume a reset token and set the user's new (hashed) password."""
        try:
            reset_password = self.db.query(ResetPasswordTokens).filter(ResetPasswordTokens.token == token).first()
            if not reset_password:
                raise ResourceNotFoundException("Reset password token not found")
            user = self.db.query(Users).filter(Users.email == reset_password.email).first()
            if not user:
                raise ResourceNotFoundException("User not found")
            user.password = hash_password(password)
            # Token is single-use: remove it in the same transaction.
            self.db.delete(reset_password)
            self.db.commit()
            return
        except Exception as e:
            self.logger.error(f"Error resetting password: {e}")
            raise e
        finally:
            self.db.close()

227
services/bot.py Normal file
View File

@ -0,0 +1,227 @@
#
# Copyright (c) 2025, Daily
#
# SPDX-License-Identifier: BSD 2-Clause License
#
import datetime
import io
import os
import sys
import wave
import aiofiles
from dotenv import load_dotenv
from fastapi import WebSocket
from loguru import logger
from pipecat.audio.vad.silero import SileroVADAnalyzer
from pipecat.pipeline.pipeline import Pipeline
from pipecat.pipeline.runner import PipelineRunner
from pipecat.pipeline.task import PipelineParams, PipelineTask
from pipecat.processors.aggregators.openai_llm_context import OpenAILLMContext
from pipecat.processors.audio.audio_buffer_processor import AudioBufferProcessor
from pipecat.serializers.twilio import TwilioFrameSerializer
from pipecat.services.elevenlabs import ElevenLabsTTSService
from pipecat.services.playht import PlayHTTTSService, Language
from pipecat.services.deepgram import DeepgramSTTService
from pipecat.services.fish import FishAudioTTSService
from pipecat.services.rime import RimeTTSService
from pipecat.services.cartesia import CartesiaTTSService
from pipecat.services.openai_realtime_beta import (
OpenAIRealtimeBetaLLMService,
SessionProperties,
TurnDetection,
)
from pipecat.services.anthropic import AnthropicLLMService
from pipecat.services.openai import OpenAILLMService
from pipecat.services.google import GoogleLLMService, GoogleLLMContext
from pipecat.transports.network.fastapi_websocket import (
FastAPIWebsocketParams,
FastAPIWebsocketTransport,
)
# Load environment variables from a local .env file; override=True lets the
# .env values win over anything already set in the process environment.
load_dotenv(override=True)

# Replace loguru's default sink (id 0) with a DEBUG-level stderr sink so the
# bot logs verbosely to the console.
logger.remove(0)
logger.add(sys.stderr, level="DEBUG")
async def save_audio(
    server_name: str, audio: bytes, sample_rate: int, num_channels: int
):
    """Write raw PCM audio to a timestamped WAV file on disk.

    Args:
        server_name: Prefix for the output filename.
        audio: Raw 16-bit PCM sample bytes (interleaved if multichannel).
        sample_rate: Sample rate in Hz.
        num_channels: Number of audio channels.
    """
    # Guard clause: nothing to write.
    if not audio:
        logger.info("No audio data to save")
        return
    filename = (
        f"{server_name}_recording_"
        f"{datetime.datetime.now().strftime('%Y%m%d_%H%M%S')}.wav"
    )
    # Build the WAV container in memory, then flush it asynchronously so the
    # event loop is not blocked by disk I/O.
    with io.BytesIO() as buffer:
        with wave.open(buffer, "wb") as wf:
            wf.setsampwidth(2)  # 16-bit samples
            wf.setnchannels(num_channels)
            wf.setframerate(sample_rate)
            wf.writeframes(audio)
        async with aiofiles.open(filename, "wb") as file:
            await file.write(buffer.getvalue())
    # Fix: the log message previously omitted the output path.
    logger.info(f"Merged audio saved to {filename}")
async def run_bot(
    websocket_client: WebSocket, stream_sid: str, testing: bool, option: int = 1
):
    """Run a full voice-assistant pipeline over a Twilio media-stream websocket.

    Wires STT -> LLM -> TTS into a pipecat pipeline, greets the caller when
    the client connects, and tears the task down on disconnect.

    Args:
        websocket_client: Accepted FastAPI websocket carrying Twilio audio.
        stream_sid: Twilio stream SID, used by the frame serializer.
        testing: When True, Cartesia/ElevenLabs TTS push silence after they
            stop speaking (used by automated tests to detect end of speech).
        option: Selects the TTS service/voice (1-5); any other value falls
            through to the final Rime "peak" voice.
    """
    # Websocket transport with Silero voice-activity detection; Twilio sends
    # raw mulaw frames, hence no WAV header on output.
    transport = FastAPIWebsocketTransport(
        websocket=websocket_client,
        params=FastAPIWebsocketParams(
            audio_in_enabled=True,
            audio_out_enabled=True,
            add_wav_header=False,
            vad_enabled=True,
            vad_analyzer=SileroVADAnalyzer(),
            vad_audio_passthrough=True,
            serializer=TwilioFrameSerializer(stream_sid),
        ),
    )
    # Alternative LLM backends kept for experimentation:
    # llm = OpenAIRealtimeBetaLLMService(
    #     api_key=os.getenv("OPENAI_API_KEY"),
    #     session_properties=SessionProperties(
    #         modalities=["text"],
    #         turn_detection=TurnDetection(threshold=0.5, silence_duration_ms=800),
    #         voice=None,
    #     ),
    # )
    llm = OpenAILLMService(api_key=os.getenv("OPENAI_API_KEY"), model="gpt-4o")
    # llm = AnthropicLLMService(api_key=os.getenv("ANTRHOPIC_API_KEY"))
    # llm = GoogleLLMService(api_key=os.getenv("GOOGLE_API_KEY"), model="phone_call")
    # audio_passthrough lets the raw audio continue downstream for recording.
    stt = DeepgramSTTService(
        api_key=os.getenv("DEEPGRAM_API_KEY"), audio_passthrough=True
    )
    # tts = PlayHTTTSService(
    #     api_key=os.getenv("PLAYHT_SECRE_KEY"),
    #     user_id=os.getenv("PLAYHT_USERID"),
    #     voice_url="s3://voice-cloning-zero-shot/80ba8839-a6e6-470c-8f68-7c1e5d3ee2ff/abigailsaad/manifest.json",
    #     params=PlayHTTTSService.InputParams(
    #         language=Language.EN,
    #         speed=1.0,
    #     ),
    # ) # not working
    # tts = FishAudioTTSService(
    #     api_key=os.getenv("FISH_AUDIO_API_KEY"),
    #     model="b545c585f631496c914815291da4e893", # Get this from Fish Audio playground
    #     output_format="pcm", # Choose output format
    #     sample_rate=24000, # Set sample rate
    #     params=FishAudioTTSService.InputParams(latency="normal", prosody_speed=1.0),
    # ) # not working
    # Voice selection table; `option` comes from the caller.
    if option == 1:
        tts = CartesiaTTSService(
            api_key=os.getenv("CARTESIA_API_KEY"),
            voice_id="156fb8d2-335b-4950-9cb3-a2d33befec77",  # British Lady
            push_silence_after_stop=testing,
        )
    elif option == 2:
        tts = RimeTTSService(
            api_key=os.getenv("RIME_API_KEY"),
            voice_id="stream",
            model="mistv2",
        )
    elif option == 3:
        tts = ElevenLabsTTSService(
            api_key=os.getenv("ELEVEN_LABS_API_KEY"),
            voice_id="79a125e8-cd45-4c13-8a67-188112f4dd22",
            push_silence_after_stop=testing,
        )
    elif option == 4:
        tts = RimeTTSService(
            api_key=os.getenv("RIME_API_KEY"),
            voice_id="breeze",
            model="mistv2",
        )
    elif option == 5:
        tts = CartesiaTTSService(
            api_key=os.getenv("CARTESIA_API_KEY"),
            # NOTE(review): labelled "British Lady" like option 1 but uses a
            # different voice id — confirm which voice this actually is.
            voice_id="1d3ba41a-96e6-44ad-aabb-9817c56caa68",
            push_silence_after_stop=testing,
        )
    else:
        tts = RimeTTSService(
            api_key=os.getenv("RIME_API_KEY"),
            voice_id="peak",
            model="mistv2",
        )
    # System prompt for the receptionist persona; spoken to the caller via TTS.
    messages = [
        {
            "role": "system",
            "content": f"""
            Welcome to 365 Days Medical Centre Para Hills - we care about you.
            If this is an emergency, please call triple zero.
            We are open from 8 AM to 8 PM every day of the year.
            All calls are recorded for training and quality purposes - please let us know if you do not wish to be recorded.
            I am Nishka, your 24/7 healthcare receptionist. Which language would you like to speak?
            """,
        }
    ]
    context = OpenAILLMContext(messages)
    context_aggregator = llm.create_context_aggregator(context)
    # NOTE: Watch out! This will save all the conversation in memory. You can
    # pass `buffer_size` to get periodic callbacks.
    audiobuffer = AudioBufferProcessor(user_continuous_stream=not testing)
    # Processor order matters: audio flows top-to-bottom through this list.
    pipeline = Pipeline(
        [
            transport.input(),  # Websocket input from client
            stt,  # Speech-To-Text
            context_aggregator.user(),  # User context
            llm,  # LLM
            tts,  # Text-To-Speech
            transport.output(),  # Websocket output to client
            audiobuffer,  # Used to buffer the audio in the pipeline
            context_aggregator.assistant(),  # Assistant context
        ]
    )
    # 8 kHz in/out matches Twilio's telephony audio format.
    task = PipelineTask(
        pipeline,
        params=PipelineParams(
            audio_in_sample_rate=8000,
            audio_out_sample_rate=8000,
            allow_interruptions=True,
        ),
    )

    @transport.event_handler("on_client_connected")
    async def on_client_connected(transport, client):
        # Start recording.
        await audiobuffer.start_recording()
        # Kick off the conversation.
        messages.append(
            {"role": "system", "content": "Please introduce yourself to the user."}
        )
        await task.queue_frames([context_aggregator.user().get_context_frame()])

    @transport.event_handler("on_client_disconnected")
    async def on_client_disconnected(transport, client):
        await task.cancel()

    # @audiobuffer.event_handler("on_audio_data")
    # async def on_audio_data(buffer, audio, sample_rate, num_channels):
    #     server_name = f"server_{websocket_client.client.port}"
    #     await save_audio(server_name, audio, sample_rate, num_channels)

    # We use `handle_sigint=False` because `uvicorn` is controlling keyboard
    # interruptions. We use `force_gc=True` to force garbage collection after
    # the runner finishes running a task which could be useful for long running
    # applications with multiple clients connecting.
    runner = PipelineRunner(handle_sigint=False, force_gc=True)
    await runner.run(task)

186
services/callTranscripts.py Normal file
View File

@ -0,0 +1,186 @@
import datetime
from typing import Optional
from fastapi import BackgroundTasks
from sqlalchemy.orm import Session
import tempfile
import zipfile
import time
from fastapi.responses import FileResponse
import os
from concurrent.futures import ThreadPoolExecutor, as_completed
from sqlalchemy import desc
from schemas.ResponseSchemas import CallTranscriptsResponse
from database import get_db
from models.CallTranscripts import CallTranscripts
from exceptions.business_exception import BusinessValidationException
from services.s3Service import get_signed_url
from interface.common_response import CommonResponse
from loguru import logger
from schemas.CreateSchemas import CallTranscriptsCreate
from exceptions.db_exceptions import DBExceptionHandler
class CallTranscriptServices:
    """Service layer for call-transcript records: creation, paginated listing
    with pre-signed S3 URLs, and single/bulk file download."""

    def __init__(self):
        # One session per service instance; every public method closes it in
        # its `finally` block, so an instance is effectively single-use.
        self.db: Session = next(get_db())
        self.logger = logger

    async def create_call_transcript(self, data: CallTranscriptsCreate):
        """Persist a new call-transcript row built from the create schema."""
        try:
            call_transcript = CallTranscripts(**data.model_dump())
            self.db.add(call_transcript)
            self.db.commit()
            return
        except Exception as e:
            DBExceptionHandler.handle_exception(e, context="creating call transcript")
        finally:
            self.db.close()

    async def get_call_transcripts(
        self,
        limit: int,
        offset: int,
        search: str = "",
        orderBy: str = "call_received_time",
        order: str = "ASC",
        startDate: Optional[datetime.datetime] = None,
        endDate: Optional[datetime.datetime] = None,
    ):
        """Return one page of transcripts plus the filtered total count.

        Each item's ``transcript_key_id`` is replaced by a pre-signed URL so
        the client can fetch the file directly from S3.
        """
        try:
            sort_column = getattr(CallTranscripts, orderBy)
            query = self.db.query(CallTranscripts).order_by(
                desc(sort_column) if order == "DESC" else sort_column
            )
            if search:
                query = query.filter(CallTranscripts.patient_number.contains(search))
            if startDate and endDate:
                query = query.filter(
                    CallTranscripts.call_received_time.between(startDate, endDate)
                )
            # Fix: count the *filtered* query so the pagination total matches
            # the result set (previously the whole table was counted even when
            # search/date filters applied).
            total = query.count()
            call_transcripts = query.limit(limit).offset(offset).all()
            response = [
                CallTranscriptsResponse(**call_transcript.__dict__.copy())
                for call_transcript in call_transcripts
            ]
            for call_transcript in response:
                call_transcript.transcript_key_id = await get_signed_url(
                    call_transcript.transcript_key_id
                )
            return CommonResponse(data=response, total=total)
        except Exception as e:
            DBExceptionHandler.handle_exception(e, context="getting call transcripts")
        finally:
            self.db.close()

    async def download_call_transcript(self, key_id: str):
        """Return a pre-signed URL for the transcript with the given S3 key.

        Raises:
            BusinessValidationException: If no transcript matches ``key_id``.
        """
        try:
            call_transcript = (
                self.db.query(CallTranscripts)
                .filter(CallTranscripts.transcript_key_id == key_id)
                .first()
            )
            if not call_transcript:
                raise BusinessValidationException("Call transcript not found!")
            # Fix: get_signed_url is a coroutine (awaited everywhere else in
            # this class); without `await` the caller received a coroutine
            # object instead of the URL string.
            return await get_signed_url(call_transcript.transcript_key_id)
        except Exception as e:
            DBExceptionHandler.handle_exception(
                e, context="downloading call transcript"
            )
        finally:
            self.db.close()

    def download_file(self, url: str, file_path: str) -> None:
        """
        Download a file from a signed URL to a local path.

        Args:
            url: The pre-signed URL to download from
            file_path: The local path to save the file to
        """
        try:
            import requests

            # A timeout keeps a hung download from blocking a worker thread
            # forever during bulk downloads.
            response = requests.get(url, timeout=60)
            if response.status_code == 200:
                with open(file_path, "wb") as f:
                    f.write(response.content)
            else:
                print(f"Failed to download file: {response.status_code}")
        except Exception as e:
            print(f"Error downloading file: {e}")

    def cleanup_temp_files(self, temp_dir: str, zip_path: str) -> None:
        """
        Clean up temporary files after sending the zip.

        Args:
            temp_dir: Directory containing temporary files
            zip_path: Path to the zip file
        """
        try:
            # Wait a short time to ensure the file has been sent
            time.sleep(5)
            # Remove the zip file
            if os.path.exists(zip_path):
                os.remove(zip_path)
            # Remove the temp directory and all its contents
            if os.path.exists(temp_dir):
                for file in os.listdir(temp_dir):
                    os.remove(os.path.join(temp_dir, file))
                os.rmdir(temp_dir)
        except Exception as e:
            print(f"Error during cleanup: {e}")

    async def bulk_download_call_transcripts(
        self, key_ids: list[int], background_tasks: BackgroundTasks
    ):
        """Zip the transcripts with the given row ids and return the archive.

        Files are fetched concurrently via signed URLs into a temp directory,
        zipped, and returned as a FileResponse.

        Raises:
            BusinessValidationException: If none of the ids match a row.
        """
        try:
            transcript_rows = (
                self.db.query(CallTranscripts)
                .filter(CallTranscripts.id.in_(key_ids))
                .all()
            )
            keys = [transcript.transcript_key_id for transcript in transcript_rows]
            if len(keys) < 1:
                raise BusinessValidationException("No call transcripts found!")
            temp_dir = tempfile.mkdtemp(prefix="call_transcripts_")
            zip_path = os.path.join(temp_dir, "call_transcripts.zip")
            # Prepare (signed URL, local path, archive name) per transcript.
            download_info = []
            for key in keys:
                url = await get_signed_url(key)
                filename = os.path.basename(key)
                file_path = os.path.join(temp_dir, filename)
                download_info.append((url, file_path, filename))
            # Cap at 32 threads or the number of files, whichever is smaller,
            # to respect system limits and S3 rate limits.
            max_workers = min(32, len(download_info))
            with ThreadPoolExecutor(max_workers=max_workers) as executor:
                future_to_file = {
                    executor.submit(self.download_file, url, file_path): (
                        file_path,
                        filename,
                    )
                    for url, file_path, filename in download_info
                }
                # Collect results as they complete.
                file_paths = []
                for future in as_completed(future_to_file):
                    file_path, filename = future_to_file[future]
                    try:
                        future.result()  # surfaces any download exception
                        file_paths.append((file_path, filename))
                    except Exception as e:
                        # Fix: the log line previously omitted the filename.
                        print(f"Error downloading {filename}: {e}")
            # Create zip file from downloaded files.
            with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zip_file:
                for file_path, arcname in file_paths:
                    if os.path.exists(file_path):
                        zip_file.write(file_path, arcname=arcname)
            # Cleanup is intentionally disabled; re-enable only once verified
            # that the response is fully sent before files are removed.
            # background_tasks.add_task(self.cleanup_temp_files, temp_dir, zip_path)
            return FileResponse(
                path=zip_path,
                media_type="application/zip",
                filename="call_transcripts.zip",
                # background=background_tasks
            )
        except Exception as e:
            DBExceptionHandler.handle_exception(
                e, context="bulk downloading call transcripts"
            )
        finally:
            self.db.close()

View File

@ -0,0 +1,305 @@
from loguru import logger
from schemas.CreateSchemas import ClinicDoctorCreate
from schemas.UpdateSchemas import ClinicDoctorUpdate
from schemas.ResponseSchemas import ClinicDoctorResponse, MasterAppointmentTypeResponse
from database import get_db
from sqlalchemy.orm import Session, joinedload, selectinload
from services.clinicServices import ClinicServices
from exceptions import ResourceNotFoundException
from interface.common_response import CommonResponse
from sqlalchemy import func, or_, cast, String
from enums.enums import ClinicDoctorStatus, UserType
from models import MasterAppointmentTypes, AppointmentRelations, Users, ClinicDoctors
from utils.constants import DEFAULT_ORDER, DEFAULT_ORDER_BY
class ClinicDoctorsServices:
    """Service layer for a clinic's doctors and their linked appointment types."""

    def __init__(self):
        # One session per service instance; methods close it in `finally`.
        self.db: Session = next(get_db())
        self.clinic_services = ClinicServices()
        self.logger = logger

    def _require_clinic_admin(self, user) -> int:
        """Validate the caller is a clinic admin with a clinic; return its id.

        Raises:
            ResourceNotFoundException: If the caller is not authorized.
        """
        if user["userType"] != UserType.CLINIC_ADMIN:
            self.logger.error("user is not clinic admin")
            raise ResourceNotFoundException(
                "You are not authorized to perform this action"
            )
        if not user["created_clinics"][0]["id"]:
            self.logger.error("user has no clinics")
            raise ResourceNotFoundException(
                "You are not authorized to perform this action"
            )
        return user["created_clinics"][0]["id"]

    def _verify_appointment_types(self, appointment_type_ids) -> None:
        """Verify all given appointment-type ids exist, in a single query.

        Raises:
            ResourceNotFoundException: Listing any ids that were not found.
        """
        if not appointment_type_ids:
            return
        existing_types = {
            t.id
            for t in self.db.query(MasterAppointmentTypes.id)
            .filter(MasterAppointmentTypes.id.in_(appointment_type_ids))
            .all()
        }
        missing_types = set(appointment_type_ids) - existing_types
        if missing_types:
            raise ResourceNotFoundException(
                f"Appointment types not found: {', '.join(map(str, missing_types))}"
            )

    async def create_clinic_doctor(
        self, user, clinic_doctor: ClinicDoctorCreate
    ) -> ClinicDoctorResponse:
        """Create a doctor for the caller's clinic plus its appointment links."""
        try:
            clinic_id = self._require_clinic_admin(user)
            self._verify_appointment_types(clinic_doctor.appointmentTypes)
            # check if clinic exists
            await self.clinic_services.get_clinic_by_id(clinic_id)
            # appointmentTypes is stored through the relation table, not as a
            # column on ClinicDoctors.
            clinic_doctor_db = ClinicDoctors(
                name=clinic_doctor.name,
                clinic_id=clinic_id,
                role=clinic_doctor.role,
                status=ClinicDoctorStatus.ACTIVE,
            )
            self.db.add(clinic_doctor_db)
            self.db.flush()  # assigns clinic_doctor_db.id before commit
            # Fix: guard against appointmentTypes being None — the old loop
            # raised TypeError when no types were supplied.
            for appointment_type_id in clinic_doctor.appointmentTypes or []:
                self.db.add(
                    AppointmentRelations(
                        clinic_doctor_id=clinic_doctor_db.id,
                        appointment_type_id=appointment_type_id,
                    )
                )
            self.db.commit()
            return
        except Exception as e:
            self.logger.error(e)
            self.db.rollback()
            raise
        finally:
            self.db.close()

    async def update_clinic_doctor(
        self, user, clinic_doctor_id: int, clinic_doctor_data: ClinicDoctorUpdate
    ) -> ClinicDoctorResponse:
        """Update a doctor's fields and replace its appointment-type links.

        Raises:
            ResourceNotFoundException: If the caller is unauthorized, an
                appointment type is unknown, or the doctor does not exist.
        """
        try:
            self._require_clinic_admin(user)
            self._verify_appointment_types(clinic_doctor_data.appointmentTypes)
            clinic_doctor = (
                self.db.query(ClinicDoctors)
                .filter(ClinicDoctors.id == clinic_doctor_id)
                .first()
            )
            if clinic_doctor is None:
                raise ResourceNotFoundException("Clinic doctor not found")
            # Apply scalar field updates; appointmentTypes is handled through
            # the relation table below, not as a ClinicDoctors column.
            update_data = clinic_doctor_data.model_dump(exclude_unset=True)
            update_data.pop("appointmentTypes", None)
            for key, value in update_data.items():
                setattr(clinic_doctor, key, value)
            self.db.add(clinic_doctor)
            # Replace the existing appointment-type links wholesale.
            self.db.query(AppointmentRelations).filter(
                AppointmentRelations.clinic_doctor_id == clinic_doctor_id
            ).delete()
            # Fix: tolerate a None appointmentTypes payload.
            for appointment_type_id in clinic_doctor_data.appointmentTypes or []:
                self.db.add(
                    AppointmentRelations(
                        clinic_doctor_id=clinic_doctor_id,
                        appointment_type_id=appointment_type_id,
                    )
                )
            self.db.commit()
            return
        except Exception:
            self.db.rollback()
            raise
        finally:
            self.db.close()

    async def delete_clinic_doctor(self, clinic_doctor_id: int):
        """Delete a doctor by id.

        Raises:
            ResourceNotFoundException: If no doctor has the given id.
        """
        try:
            clinic_doctor = (
                self.db.query(ClinicDoctors)
                .filter(ClinicDoctors.id == clinic_doctor_id)
                .first()
            )
            # Fix: a missing row previously reached db.delete(None) and
            # raised an opaque SQLAlchemy error instead of a domain exception.
            if clinic_doctor is None:
                raise ResourceNotFoundException("Clinic doctor not found")
            self.db.delete(clinic_doctor)
            self.db.commit()
        finally:
            self.db.close()

    async def get_doctor_status_count(self, clinic_id: int):
        """Return ``{status_value: count}`` covering every ClinicDoctorStatus."""
        try:
            # Count doctors grouped by status for this clinic.
            status_counts = (
                self.db.query(
                    ClinicDoctors.status, func.count(ClinicDoctors.id).label("count")
                )
                .filter(ClinicDoctors.clinic_id == clinic_id)
                .group_by(ClinicDoctors.status)
                .all()
            )
            # Start every status at zero so absent groups still appear.
            result = {status.value: 0 for status in ClinicDoctorStatus}
            for status, count in status_counts:
                result[status.value] = count
            return result
        finally:
            self.db.close()

    async def get_clinic_doctors(
        self,
        clinic_id: int,
        limit: int | None = None,
        offset: int | None = None,
        search: str = "",
        sort_by: str = DEFAULT_ORDER,
        sort_order: str = DEFAULT_ORDER_BY,
    ):
        """List a clinic's doctors with their appointment types (paginated).

        NOTE(review): sort_by defaults to DEFAULT_ORDER while sort_order
        defaults to DEFAULT_ORDER_BY — the constants look swapped; confirm
        their values before changing the defaults.
        """
        try:
            return await self._get_clinic_doctors(
                clinic_id, limit, offset, search, sort_by, sort_order
            )
        except Exception as e:
            self.logger.error(e)
            raise

    async def _get_clinic_doctors(
        self,
        clinic_id: int,
        limit: int | None = None,
        offset: int | None = None,
        search: str = "",
        sort_by: str = DEFAULT_ORDER,
        sort_order: str = DEFAULT_ORDER_BY,
    ):
        """Query one clinic's doctors with eager-loaded appointment types."""
        try:
            sort_column = getattr(ClinicDoctors, sort_by)
            clinic_doctors_query = (
                self.db.query(ClinicDoctors)
                .filter(ClinicDoctors.clinic_id == clinic_id)
                .options(
                    selectinload(ClinicDoctors.appointmentRelations).selectinload(
                        AppointmentRelations.masterAppointmentTypes
                    )
                )
                .order_by(
                    sort_column.desc() if sort_order == "desc" else sort_column.asc()
                )
            )
            if search:
                clinic_doctors_query = clinic_doctors_query.filter(
                    or_(
                        ClinicDoctors.name.ilike(f"%{search}%"),
                        cast(ClinicDoctors.role, String).ilike(f"%{search}%"),
                        ClinicDoctors.appointmentRelations.any(
                            AppointmentRelations.masterAppointmentTypes.has(
                                MasterAppointmentTypes.type.ilike(f"%{search}%")
                            )
                        ),
                    )
                )
            # Fix: count once, after all filters. The old code first counted
            # the whole ClinicDoctors table (ignoring even the clinic filter)
            # and only recomputed when a search term was given.
            total = clinic_doctors_query.count()
            # Fix: apply pagination whenever a limit is given; the old
            # `if limit and offset:` skipped it entirely for the first page
            # (offset 0).
            if limit is not None:
                clinic_doctors_query = clinic_doctors_query.limit(limit).offset(
                    offset or 0
                )
            clinic_doctors = clinic_doctors_query.all()
            # Build response data manually to include appointment types.
            response_data = []
            for clinic_doctor in clinic_doctors:
                appointment_types = [
                    MasterAppointmentTypeResponse(
                        id=relation.masterAppointmentTypes.id,
                        type=relation.masterAppointmentTypes.type,
                        create_time=relation.masterAppointmentTypes.create_time,
                        update_time=relation.masterAppointmentTypes.update_time,
                    )
                    for relation in clinic_doctor.appointmentRelations
                    if relation.masterAppointmentTypes
                ]
                response_data.append(
                    ClinicDoctorResponse(
                        id=clinic_doctor.id,
                        name=clinic_doctor.name,
                        role=clinic_doctor.role,
                        status=clinic_doctor.status,
                        create_time=clinic_doctor.create_time,
                        update_time=clinic_doctor.update_time,
                        appointmentTypes=appointment_types,
                    )
                )
            return CommonResponse(data=response_data, total=total)
        except Exception as e:
            self.logger.error(e)
            raise
        finally:
            self.db.close()

Some files were not shown because too many files have changed in this diff Show More