testing payslip
This view is limited to 50 files because it contains too many changes.
- .github/workflows/notify.yml +44 -0
- .gitignore +4 -0
- Dockerfile +11 -0
- alembic.ini +147 -0
- alembic/README +1 -0
- alembic/env.py +78 -0
- alembic/script.py.mako +29 -0
- alembic/versions/1cbb7317540a_fix_water_log_id_uuid.py +32 -0
- alembic/versions/1eacc17f4c52_add_leaves_table_and_device_tokens_array.py +33 -0
- alembic/versions/217db60578fa_added_journal_table.py +44 -0
- alembic/versions/52828bff621c_updated_leave_status_enum_cancelled.py +31 -0
- alembic/versions/584a5111e60f_initial_migration.py +33 -0
- alembic/versions/5b5a3c7a6255_update_cascade_deletion.py +99 -0
- alembic/versions/9176b44b89ba_added_water_logs_table.py +42 -0
- alembic/versions/a3c79664f866_sync_models.py +33 -0
- alembic/versions/b33e3b5b7af9_added_roles.py +33 -0
- alembic/versions/d9b4df655a55_updated_emotion_tag.py +45 -0
- alembic/versions/d9bb355538fd_add_is_read_to_leave_table.py +34 -0
- alembic/versions/dd61202db14f_add_knowledgebase_chunk.py +33 -0
- alembic/versions/e8066533b622_delete_user_verification_cols.py +49 -0
- alembic/versions/e95f62f91348_added_leave_and_userdevice_table.py +33 -0
- alembic/versions/e96769f268bc_fix_post_s_user_cascade.py +41 -0
- alembic/versions/f6a1d6fc82d0_add_image_url_column_to_kb.py +33 -0
- alembic/versions/fec3872d7eba_add_payslip_table.py +41 -0
- queries/Query.sql +0 -0
- requirements.txt +80 -0
- src/auth/__init__.py +0 -0
- src/auth/config.py +17 -0
- src/auth/constants.py +2 -0
- src/auth/dependencies.py +0 -0
- src/auth/exceptions.py +0 -0
- src/auth/feed_db_script.py +132 -0
- src/auth/models.py +2 -0
- src/auth/router.py +149 -0
- src/auth/schemas.py +38 -0
- src/auth/service.py +156 -0
- src/auth/utils.py +206 -0
- src/chatbot/__init__.py +0 -0
- src/chatbot/config.py +6 -0
- src/chatbot/constants.py +2 -0
- src/chatbot/dependencies.py +0 -0
- src/chatbot/embedding.py +71 -0
- src/chatbot/exceptions.py +0 -0
- src/chatbot/models.py +36 -0
- src/chatbot/router.py +159 -0
- src/chatbot/schemas.py +41 -0
- src/chatbot/service.py +71 -0
- src/chatbot/utils.py +57 -0
- src/core/__init__.py +9 -0
- src/core/config.py +79 -0
.github/workflows/notify.yml
ADDED
@@ -0,0 +1,44 @@
+name: Daily Emotion Check-In Notifications
+
+on:
+  schedule:
+    - cron: "30 3 * * *"   # 9 AM IST
+    - cron: "15 11 * * *"  # 4:45 PM IST
+  workflow_dispatch: {}
+
+jobs:
+  morning_notify:
+    if: github.event.schedule == '30 3 * * *'
+    runs-on: ubuntu-latest
+    steps:
+      - name: Send morning notification
+        run: |
+          curl -X POST "https://yuvabe-ai-yuvabe-app-backend.hf.space/home/notify/all" \
+            -H "accept: application/json" \
+            -H "Content-Type: application/json" \
+            -d '{
+              "title": "Good Morning!",
+              "body": "How are you feeling today?",
+              "data": {
+                "type": "home_alert",
+                "message": "Good morning! Would you like to share how you feel today?"
+              }
+            }'
+
+  evening_notify:
+    if: github.event.schedule == '15 11 * * *'
+    runs-on: ubuntu-latest
+    steps:
+      - name: Send evening notification
+        run: |
+          curl -X POST "https://yuvabe-ai-yuvabe-app-backend.hf.space/home/notify/all" \
+            -H "accept: application/json" \
+            -H "Content-Type: application/json" \
+            -d '{
+              "title": "Good Evening!",
+              "body": "Take a moment to reflect on your day.",
+              "data": {
+                "type": "home_alert",
+                "message": "Evening check-in — How was your day?"
+              }
+            }'
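Note that GitHub Actions cron schedules run in UTC, which is why 9:00 AM and 4:45 PM IST appear as `30 3 * * *` and `15 11 * * *`, and each job gates on `github.event.schedule` so only the matching trigger runs it. For quick manual testing outside Actions, a minimal Python sketch that mirrors the same request (only the endpoint and payload shown in notify.yml are assumed; `requests` is already pinned in requirements.txt):

import requests

# Mirrors the morning curl call from the workflow; endpoint and payload
# shape are copied verbatim from notify.yml, nothing else is assumed.
payload = {
    "title": "Good Morning!",
    "body": "How are you feeling today?",
    "data": {
        "type": "home_alert",
        "message": "Good morning! Would you like to share how you feel today?",
    },
}

resp = requests.post(
    "https://yuvabe-ai-yuvabe-app-backend.hf.space/home/notify/all",
    json=payload,
    headers={"accept": "application/json"},
    timeout=30,
)
print(resp.status_code, resp.text)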
.gitignore
ADDED
@@ -0,0 +1,4 @@
+__pycache__/
+venv/
+.env
+.idea/
Dockerfile
ADDED
@@ -0,0 +1,11 @@
+FROM python:3.10
+
+WORKDIR /app
+
+COPY . .
+
+RUN pip install --no-cache-dir -r requirements.txt
+
+EXPOSE 7860
+
+CMD ["uvicorn", "src.main:app", "--host", "0.0.0.0", "--port", "7860"]
alembic.ini
ADDED
@@ -0,0 +1,147 @@
+# A generic, single database configuration.
+
+[alembic]
+# path to migration scripts.
+# this is typically a path given in POSIX (e.g. forward slashes)
+# format, relative to the token %(here)s which refers to the location of this
+# ini file
+script_location = %(here)s/alembic
+
+# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
+# Uncomment the line below if you want the files to be prepended with date and time
+# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
+# for all available tokens
+# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
+
+# sys.path path, will be prepended to sys.path if present.
+# defaults to the current working directory. for multiple paths, the path separator
+# is defined by "path_separator" below.
+prepend_sys_path = .
+
+
+# timezone to use when rendering the date within the migration file
+# as well as the filename.
+# If specified, requires the tzdata library which can be installed by adding
+# `alembic[tz]` to the pip requirements.
+# string value is passed to ZoneInfo()
+# leave blank for localtime
+# timezone =
+
+# max length of characters to apply to the "slug" field
+# truncate_slug_length = 40
+
+# set to 'true' to run the environment during
+# the 'revision' command, regardless of autogenerate
+# revision_environment = false
+
+# set to 'true' to allow .pyc and .pyo files without
+# a source .py file to be detected as revisions in the
+# versions/ directory
+# sourceless = false
+
+# version location specification; This defaults
+# to <script_location>/versions. When using multiple version
+# directories, initial revisions must be specified with --version-path.
+# The path separator used here should be the separator specified by "path_separator"
+# below.
+# version_locations = %(here)s/bar:%(here)s/bat:%(here)s/alembic/versions
+
+# path_separator; This indicates what character is used to split lists of file
+# paths, including version_locations and prepend_sys_path within configparser
+# files such as alembic.ini.
+# The default rendered in new alembic.ini files is "os", which uses os.pathsep
+# to provide os-dependent path splitting.
+#
+# Note that in order to support legacy alembic.ini files, this default does NOT
+# take place if path_separator is not present in alembic.ini. If this
+# option is omitted entirely, fallback logic is as follows:
+#
+# 1. Parsing of the version_locations option falls back to using the legacy
+#    "version_path_separator" key, which if absent then falls back to the legacy
+#    behavior of splitting on spaces and/or commas.
+# 2. Parsing of the prepend_sys_path option falls back to the legacy
+#    behavior of splitting on spaces, commas, or colons.
+#
+# Valid values for path_separator are:
+#
+# path_separator = :
+# path_separator = ;
+# path_separator = space
+# path_separator = newline
+#
+# Use os.pathsep. Default configuration used for new projects.
+path_separator = os
+
+# set to 'true' to search source files recursively
+# in each "version_locations" directory
+# new in Alembic version 1.10
+# recursive_version_locations = false
+
+# the output encoding used when revision files
+# are written from script.py.mako
+# output_encoding = utf-8
+
+# database URL. This is consumed by the user-maintained env.py script only.
+# other means of configuring database URLs may be customized within the env.py
+# file.
+sqlalchemy.url = driver://user:pass@localhost/dbname
+
+
+[post_write_hooks]
+# post_write_hooks defines scripts or Python functions that are run
+# on newly generated revision scripts. See the documentation for further
+# detail and examples
+
+# format using "black" - use the console_scripts runner, against the "black" entrypoint
+# hooks = black
+# black.type = console_scripts
+# black.entrypoint = black
+# black.options = -l 79 REVISION_SCRIPT_FILENAME
+
+# lint with attempts to fix using "ruff" - use the module runner, against the "ruff" module
+# hooks = ruff
+# ruff.type = module
+# ruff.module = ruff
+# ruff.options = check --fix REVISION_SCRIPT_FILENAME
+
+# Alternatively, use the exec runner to execute a binary found on your PATH
+# hooks = ruff
+# ruff.type = exec
+# ruff.executable = ruff
+# ruff.options = check --fix REVISION_SCRIPT_FILENAME
+
+# Logging configuration. This is also consumed by the user-maintained
+# env.py script only.
+[loggers]
+keys = root,sqlalchemy,alembic
+
+[handlers]
+keys = console
+
+[formatters]
+keys = generic
+
+[logger_root]
+level = WARNING
+handlers = console
+qualname =
+
+[logger_sqlalchemy]
+level = WARNING
+handlers =
+qualname = sqlalchemy.engine
+
+[logger_alembic]
+level = INFO
+handlers =
+qualname = alembic
+
+[handler_console]
+class = StreamHandler
+args = (sys.stderr,)
+level = NOTSET
+formatter = generic
+
+[formatter_generic]
+format = %(levelname)-5.5s [%(name)s] %(message)s
+datefmt = %H:%M:%S
alembic/README
ADDED
@@ -0,0 +1 @@
+Generic single-database configuration.
alembic/env.py
ADDED
@@ -0,0 +1,78 @@
+from logging.config import fileConfig
+
+from sqlalchemy import engine_from_config, pool
+from sqlmodel import SQLModel
+
+from alembic import context
+from src.core import *
+from src.core.config import settings
+
+# this is the Alembic Config object, which provides
+# access to the values within the .ini file in use.
+config = context.config
+config.set_main_option(name="sqlalchemy.url", value=settings.DATABASE_URL)
+# Interpret the config file for Python logging.
+# This line sets up loggers basically.
+if config.config_file_name is not None:
+    fileConfig(config.config_file_name)
+
+# add your model's MetaData object here
+# for 'autogenerate' support
+# from myapp import mymodel
+# target_metadata = mymodel.Base.metadata
+target_metadata = SQLModel.metadata
+
+# other values from the config, defined by the needs of env.py,
+# can be acquired:
+# my_important_option = config.get_main_option("my_important_option")
+# ... etc.
+
+
+def run_migrations_offline() -> None:
+    """Run migrations in 'offline' mode.
+
+    This configures the context with just a URL
+    and not an Engine, though an Engine is acceptable
+    here as well. By skipping the Engine creation
+    we don't even need a DBAPI to be available.
+
+    Calls to context.execute() here emit the given string to the
+    script output.
+
+    """
+    url = config.get_main_option("sqlalchemy.url")
+    context.configure(
+        url=url,
+        target_metadata=target_metadata,
+        literal_binds=True,
+        dialect_opts={"paramstyle": "named"},
+    )
+
+    with context.begin_transaction():
+        context.run_migrations()
+
+
+def run_migrations_online() -> None:
+    """Run migrations in 'online' mode.
+
+    In this scenario we need to create an Engine
+    and associate a connection with the context.
+
+    """
+    connectable = engine_from_config(
+        config.get_section(config.config_ini_section, {}),
+        prefix="sqlalchemy.",
+        poolclass=pool.NullPool,
+    )
+
+    with connectable.connect() as connection:
+        context.configure(connection=connection, target_metadata=target_metadata)
+
+        with context.begin_transaction():
+            context.run_migrations()
+
+
+if context.is_offline_mode():
+    run_migrations_offline()
+else:
+    run_migrations_online()
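env.py overrides the placeholder URL in alembic.ini with `settings.DATABASE_URL`. src/core/config.py appears in the file list above (+79 lines) but its body is not shown in this 50-file view, so the following is only a hedged sketch of the shape env.py relies on, not the actual file:

# Hypothetical sketch of src/core/config.py; the real file is not visible in
# this diff. env.py only needs settings.DATABASE_URL to resolve to a
# SQLAlchemy URL.
from pydantic_settings import BaseSettings, SettingsConfigDict


class Settings(BaseSettings):
    model_config = SettingsConfigDict(env_file=".env")

    DATABASE_URL: str


settings = Settings()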
alembic/script.py.mako
ADDED
@@ -0,0 +1,29 @@
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+import sqlmodel.sql.sqltypes
+${imports if imports else ""}
+
+# revision identifiers, used by Alembic.
+revision: str = ${repr(up_revision)}
+down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)}
+branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
+depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
+
+
+def upgrade() -> None:
+    """Upgrade schema."""
+    ${upgrades if upgrades else "pass"}
+
+
+def downgrade() -> None:
+    """Downgrade schema."""
+    ${downgrades if downgrades else "pass"}
alembic/versions/1cbb7317540a_fix_water_log_id_uuid.py
ADDED
@@ -0,0 +1,32 @@
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+import uuid
+
+# revision identifiers, used by Alembic.
+revision: str = '1cbb7317540a'
+down_revision: Union[str, Sequence[str], None] = '9176b44b89ba'
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    """Upgrade schema."""
+    # Drop the old id column (this will remove the INTEGER id column)
+    op.drop_column('water_logs', 'id')
+
+    # Add the new UUID id column
+    op.add_column('water_logs', sa.Column('id', sa.UUID(), nullable=False, primary_key=True, default=uuid.uuid4))
+
+    # Optional: If you had any foreign key constraints or other indexes on the old id, you might need to add them back.
+
+def downgrade() -> None:
+    """Downgrade schema."""
+    # Drop the UUID id column
+    op.drop_column('water_logs', 'id')
+
+    # Add the original INTEGER id column back
+    op.add_column('water_logs', sa.Column('id', sa.Integer(), nullable=False, primary_key=True))
+
+    # Optional: Restore any other changes you might have made during the downgrade
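Two caveats worth flagging here: `op.add_column()` does not turn `Column(primary_key=True)` into a PRIMARY KEY constraint, and `default=uuid.uuid4` is a client-side default that never reaches the DDL, so this migration likely leaves water_logs without a primary key or a server-side default. A hedged alternative, assuming (as the non-nullable add above already does) that the table is empty when the migration runs:

# Sketch only: add the column, then create the PK constraint explicitly.
# 'water_logs_pkey' follows PostgreSQL's default naming and is an assumption.
op.add_column('water_logs', sa.Column('id', sa.UUID(), nullable=False))
op.create_primary_key('water_logs_pkey', 'water_logs', ['id'])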
alembic/versions/1eacc17f4c52_add_leaves_table_and_device_tokens_array.py
ADDED
@@ -0,0 +1,33 @@
+"""add leaves table and device_tokens array
+
+Revision ID: 1eacc17f4c52
+Revises: dd61202db14f
+Create Date: 2025-11-18 22:14:31.077909
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+import sqlmodel.sql.sqltypes
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision: str = '1eacc17f4c52'
+down_revision: Union[str, Sequence[str], None] = 'dd61202db14f'
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    """Upgrade schema."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column('users', sa.Column('device_tokens', postgresql.ARRAY(sa.String()), nullable=True))
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    """Downgrade schema."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_column('users', 'device_tokens')
+    # ### end Alembic commands ###
alembic/versions/217db60578fa_added_journal_table.py
ADDED
@@ -0,0 +1,44 @@
+"""added journal table
+
+Revision ID: 217db60578fa
+Revises: d9b4df655a55
+Create Date: 2025-12-05 14:19:31.722971
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+import sqlmodel.sql.sqltypes
+
+
+# revision identifiers, used by Alembic.
+revision: str = '217db60578fa'
+down_revision: Union[str, Sequence[str], None] = 'd9b4df655a55'
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    """Upgrade schema."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table('journal_entries',
+    sa.Column('id', sa.Uuid(), nullable=False),
+    sa.Column('user_id', sa.UUID(), nullable=False),
+    sa.Column('title', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
+    sa.Column('content', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
+    sa.Column('journal_date', sa.Date(), nullable=False),
+    sa.Column('created_at', sa.DateTime(), nullable=False),
+    sa.Column('updated_at', sa.DateTime(), nullable=False),
+    sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
+    sa.PrimaryKeyConstraint('id'),
+    sa.UniqueConstraint('user_id', 'journal_date', name='unique_user_date_journal')
+    )
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    """Downgrade schema."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_table('journal_entries')
+    # ### end Alembic commands ###
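The `unique_user_date_journal` constraint caps journaling at one entry per user per day, so writes against this table have to handle conflicts. A hedged illustration of how the constraint can back an upsert (column names come from the migration; `gen_random_uuid()` assumes PostgreSQL 13+ or the pgcrypto extension, and none of this is confirmed application code):

from sqlalchemy import text

# Illustrative upsert against the unique (user_id, journal_date) constraint.
UPSERT_JOURNAL = text("""
    INSERT INTO journal_entries
        (id, user_id, title, content, journal_date, created_at, updated_at)
    VALUES
        (gen_random_uuid(), :user_id, :title, :content, :journal_date, now(), now())
    ON CONFLICT ON CONSTRAINT unique_user_date_journal
    DO UPDATE SET title = EXCLUDED.title,
                  content = EXCLUDED.content,
                  updated_at = now()
""")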
alembic/versions/52828bff621c_updated_leave_status_enum_cancelled.py
ADDED
@@ -0,0 +1,31 @@
+"""updated leave_status enum CANCELLED
+
+Revision ID: 52828bff621c
+Revises: d9bb355538fd
+Create Date: 2025-11-25 20:04:09.823594
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+import sqlmodel.sql.sqltypes
+
+
+# revision identifiers, used by Alembic.
+revision: str = '52828bff621c'
+down_revision: Union[str, Sequence[str], None] = 'd9bb355538fd'
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    """Upgrade schema."""
+    pass
+
+
+def downgrade() -> None:
+    """Downgrade schema."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    pass
+    # ### end Alembic commands ###
alembic/versions/584a5111e60f_initial_migration.py
ADDED
@@ -0,0 +1,33 @@
+"""initial migration
+
+Revision ID: 584a5111e60f
+Revises:
+Create Date: 2025-11-10 23:50:23.367946
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+import sqlmodel.sql.sqltypes
+
+
+# revision identifiers, used by Alembic.
+revision: str = '584a5111e60f'
+down_revision: Union[str, Sequence[str], None] = None
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    """Upgrade schema."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    pass
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    """Downgrade schema."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    pass
+    # ### end Alembic commands ###
alembic/versions/5b5a3c7a6255_update_cascade_deletion.py
ADDED
@@ -0,0 +1,99 @@
+"""update: cascade deletion
+
+Revision ID: 5b5a3c7a6255
+Revises: 52828bff621c
+Create Date: 2025-11-28 11:27:38.960269
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+import sqlmodel.sql.sqltypes
+
+
+# revision identifiers, used by Alembic.
+revision: str = '5b5a3c7a6255'
+down_revision: Union[str, Sequence[str], None] = '52828bff621c'
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    """Upgrade schema."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_constraint(op.f('assets_user_id_fkey'), 'assets', type_='foreignkey')
+    op.create_foreign_key(None, 'assets', 'users', ['user_id'], ['id'], ondelete='CASCADE')
+    op.drop_constraint(op.f('comments_user_id_fkey'), 'comments', type_='foreignkey')
+    op.drop_constraint(op.f('comments_post_id_fkey'), 'comments', type_='foreignkey')
+    op.create_foreign_key(None, 'comments', 'posts', ['post_id'], ['id'], ondelete='CASCADE')
+    op.create_foreign_key(None, 'comments', 'users', ['user_id'], ['id'], ondelete='CASCADE')
+    op.drop_constraint(op.f('emotion_logs_user_id_fkey'), 'emotion_logs', type_='foreignkey')
+    op.create_foreign_key(None, 'emotion_logs', 'users', ['user_id'], ['id'], ondelete='CASCADE')
+    op.drop_constraint(op.f('knowledge_chunk_kb_id_fkey'), 'knowledge_chunk', type_='foreignkey')
+    op.create_foreign_key(None, 'knowledge_chunk', 'knowledge_base', ['kb_id'], ['id'], ondelete='CASCADE')
+    op.alter_column('leave', 'mentor_id',
+               existing_type=sa.UUID(),
+               nullable=True)
+    op.alter_column('leave', 'lead_id',
+               existing_type=sa.UUID(),
+               nullable=True)
+    op.drop_constraint(op.f('leave_lead_id_fkey'), 'leave', type_='foreignkey')
+    op.drop_constraint(op.f('leave_user_id_fkey'), 'leave', type_='foreignkey')
+    op.drop_constraint(op.f('leave_mentor_id_fkey'), 'leave', type_='foreignkey')
+    op.create_foreign_key(None, 'leave', 'users', ['user_id'], ['id'], ondelete='CASCADE')
+    op.create_foreign_key(None, 'leave', 'users', ['mentor_id'], ['id'], ondelete='SET NULL')
+    op.create_foreign_key(None, 'leave', 'users', ['lead_id'], ['id'], ondelete='SET NULL')
+    op.drop_constraint(op.f('likes_user_id_fkey'), 'likes', type_='foreignkey')
+    op.drop_constraint(op.f('likes_post_id_fkey'), 'likes', type_='foreignkey')
+    op.create_foreign_key(None, 'likes', 'users', ['user_id'], ['id'], ondelete='CASCADE')
+    op.create_foreign_key(None, 'likes', 'posts', ['post_id'], ['id'], ondelete='CASCADE')
+    op.drop_constraint(op.f('user_devices_user_id_fkey'), 'user_devices', type_='foreignkey')
+    op.create_foreign_key(None, 'user_devices', 'users', ['user_id'], ['id'], ondelete='CASCADE')
+    op.drop_constraint(op.f('user_teams_role_team_id_fkey'), 'user_teams_role', type_='foreignkey')
+    op.drop_constraint(op.f('user_teams_role_role_id_fkey'), 'user_teams_role', type_='foreignkey')
+    op.drop_constraint(op.f('user_teams_role_user_id_fkey'), 'user_teams_role', type_='foreignkey')
+    op.create_foreign_key(None, 'user_teams_role', 'teams', ['team_id'], ['id'], ondelete='CASCADE')
+    op.create_foreign_key(None, 'user_teams_role', 'users', ['user_id'], ['id'], ondelete='CASCADE')
+    op.create_foreign_key(None, 'user_teams_role', 'roles', ['role_id'], ['id'], ondelete='CASCADE')
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    """Downgrade schema."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_constraint(None, 'user_teams_role', type_='foreignkey')
+    op.drop_constraint(None, 'user_teams_role', type_='foreignkey')
+    op.drop_constraint(None, 'user_teams_role', type_='foreignkey')
+    op.create_foreign_key(op.f('user_teams_role_user_id_fkey'), 'user_teams_role', 'users', ['user_id'], ['id'])
+    op.create_foreign_key(op.f('user_teams_role_role_id_fkey'), 'user_teams_role', 'roles', ['role_id'], ['id'])
+    op.create_foreign_key(op.f('user_teams_role_team_id_fkey'), 'user_teams_role', 'teams', ['team_id'], ['id'])
+    op.drop_constraint(None, 'user_devices', type_='foreignkey')
+    op.create_foreign_key(op.f('user_devices_user_id_fkey'), 'user_devices', 'users', ['user_id'], ['id'])
+    op.drop_constraint(None, 'likes', type_='foreignkey')
+    op.drop_constraint(None, 'likes', type_='foreignkey')
+    op.create_foreign_key(op.f('likes_post_id_fkey'), 'likes', 'posts', ['post_id'], ['id'])
+    op.create_foreign_key(op.f('likes_user_id_fkey'), 'likes', 'users', ['user_id'], ['id'])
+    op.drop_constraint(None, 'leave', type_='foreignkey')
+    op.drop_constraint(None, 'leave', type_='foreignkey')
+    op.drop_constraint(None, 'leave', type_='foreignkey')
+    op.create_foreign_key(op.f('leave_mentor_id_fkey'), 'leave', 'users', ['mentor_id'], ['id'])
+    op.create_foreign_key(op.f('leave_user_id_fkey'), 'leave', 'users', ['user_id'], ['id'])
+    op.create_foreign_key(op.f('leave_lead_id_fkey'), 'leave', 'users', ['lead_id'], ['id'])
+    op.alter_column('leave', 'lead_id',
+               existing_type=sa.UUID(),
+               nullable=False)
+    op.alter_column('leave', 'mentor_id',
+               existing_type=sa.UUID(),
+               nullable=False)
+    op.drop_constraint(None, 'knowledge_chunk', type_='foreignkey')
+    op.create_foreign_key(op.f('knowledge_chunk_kb_id_fkey'), 'knowledge_chunk', 'knowledge_base', ['kb_id'], ['id'])
+    op.drop_constraint(None, 'emotion_logs', type_='foreignkey')
+    op.create_foreign_key(op.f('emotion_logs_user_id_fkey'), 'emotion_logs', 'users', ['user_id'], ['id'])
+    op.drop_constraint(None, 'comments', type_='foreignkey')
+    op.drop_constraint(None, 'comments', type_='foreignkey')
+    op.create_foreign_key(op.f('comments_post_id_fkey'), 'comments', 'posts', ['post_id'], ['id'])
+    op.create_foreign_key(op.f('comments_user_id_fkey'), 'comments', 'users', ['user_id'], ['id'])
+    op.drop_constraint(None, 'assets', type_='foreignkey')
+    op.create_foreign_key(op.f('assets_user_id_fkey'), 'assets', 'users', ['user_id'], ['id'])
+    # ### end Alembic commands ###
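One caution about this downgrade: `op.drop_constraint(None, ...)` raises at runtime because Alembic requires a constraint name; autogenerate renders `None` whenever the upgrade created the constraint with `create_foreign_key(None, ...)`. Assuming PostgreSQL's default `<table>_<column>_fkey` naming applied when the unnamed constraints were created, each drop would need the name spelled out, along these lines (the names are an assumption based on that convention):

# Sketch: replace each drop_constraint(None, ...) with the generated name.
op.drop_constraint('user_teams_role_team_id_fkey', 'user_teams_role', type_='foreignkey')
op.drop_constraint('user_teams_role_role_id_fkey', 'user_teams_role', type_='foreignkey')
op.drop_constraint('user_teams_role_user_id_fkey', 'user_teams_role', type_='foreignkey')

The same unnamed-constraint pattern recurs in the downgrades of e96769f268bc and fec3872d7eba below.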
alembic/versions/9176b44b89ba_added_water_logs_table.py
ADDED
@@ -0,0 +1,42 @@
+"""added: water_logs table
+
+Revision ID: 9176b44b89ba
+Revises: e96769f268bc
+Create Date: 2025-11-29 21:28:42.789611
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+import sqlmodel.sql.sqltypes
+
+
+# revision identifiers, used by Alembic.
+revision: str = '9176b44b89ba'
+down_revision: Union[str, Sequence[str], None] = 'e96769f268bc'
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    """Upgrade schema."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table('water_logs',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('user_id', sa.UUID(), nullable=False),
+    sa.Column('amount_ml', sa.Integer(), nullable=False),
+    sa.Column('logged_at', sa.DateTime(), nullable=False),
+    sa.Column('goal_ml', sa.Integer(), nullable=True),
+    sa.Column('recommended_ml', sa.Integer(), nullable=True),
+    sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
+    sa.PrimaryKeyConstraint('id')
+    )
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    """Downgrade schema."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_table('water_logs')
+    # ### end Alembic commands ###
alembic/versions/a3c79664f866_sync_models.py
ADDED
@@ -0,0 +1,33 @@
+"""sync models
+
+Revision ID: a3c79664f866
+Revises: 1eacc17f4c52
+Create Date: 2025-11-18 22:23:01.757260
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+import sqlmodel.sql.sqltypes
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision: str = 'a3c79664f866'
+down_revision: Union[str, Sequence[str], None] = '1eacc17f4c52'
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    """Upgrade schema."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column('users', sa.Column('device_tokens', postgresql.ARRAY(sa.String()), nullable=True))
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    """Downgrade schema."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_column('users', 'device_tokens')
+    # ### end Alembic commands ###
alembic/versions/b33e3b5b7af9_added_roles.py
ADDED
@@ -0,0 +1,33 @@
+"""Added roles
+
+Revision ID: b33e3b5b7af9
+Revises: e8066533b622
+Create Date: 2025-11-16 21:10:02.038255
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+import sqlmodel.sql.sqltypes
+
+
+# revision identifiers, used by Alembic.
+revision: str = 'b33e3b5b7af9'
+down_revision: Union[str, Sequence[str], None] = 'e8066533b622'
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    """Upgrade schema."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    pass
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    """Downgrade schema."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    pass
+    # ### end Alembic commands ###
alembic/versions/d9b4df655a55_updated_emotion_tag.py
ADDED
@@ -0,0 +1,45 @@
+"""updated emotion tag
+
+Revision ID: d9b4df655a55
+Revises: fec3872d7eba
+Create Date: 2025-12-04 14:34:22.373838
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+import sqlmodel.sql.sqltypes
+
+
+# revision identifiers, used by Alembic.
+revision: str = 'd9b4df655a55'
+down_revision: Union[str, Sequence[str], None] = 'fec3872d7eba'
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    """Upgrade schema."""
+    emotion_enum = sa.Enum(
+        'JOYFUL', 'HAPPY', 'CALM', 'NEUTRAL', 'ANXIOUS', 'SAD', 'FRUSTRATED',
+        name='emotion_enum'
+    )
+    emotion_enum.create(op.get_bind(), checkfirst=True)
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column('emotion_logs', sa.Column('morning_emotion', sa.Enum('JOYFUL', 'HAPPY', 'CALM', 'NEUTRAL', 'ANXIOUS', 'SAD', 'FRUSTRATED', name='emotion_enum'), nullable=True))
+    op.add_column('emotion_logs', sa.Column('evening_emotion', sa.Enum('JOYFUL', 'HAPPY', 'CALM', 'NEUTRAL', 'ANXIOUS', 'SAD', 'FRUSTRATED', name='emotion_enum'), nullable=True))
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    """Downgrade schema."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_column('emotion_logs', 'evening_emotion')
+    op.drop_column('emotion_logs', 'morning_emotion')
+    emotion_enum = sa.Enum(
+        'JOYFUL', 'HAPPY', 'CALM', 'NEUTRAL', 'ANXIOUS', 'SAD', 'FRUSTRATED',
+        name='emotion_enum'
+    )
+    emotion_enum.drop(op.get_bind(), checkfirst=True)
+    # ### end Alembic commands ###
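A version-dependent caveat: with some SQLAlchemy/Alembic combinations, `op.add_column()` with a plain `sa.Enum` emits its own CREATE TYPE and can collide with the type pre-created above via `checkfirst=True`. A defensive variant on PostgreSQL references the existing type explicitly (a sketch, not a required change):

from sqlalchemy.dialects import postgresql

# Reuse the pre-created emotion_enum type instead of letting the column
# definition attempt to create it again.
emotion_type = postgresql.ENUM(
    'JOYFUL', 'HAPPY', 'CALM', 'NEUTRAL', 'ANXIOUS', 'SAD', 'FRUSTRATED',
    name='emotion_enum', create_type=False,
)
op.add_column('emotion_logs', sa.Column('morning_emotion', emotion_type, nullable=True))
op.add_column('emotion_logs', sa.Column('evening_emotion', emotion_type, nullable=True))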
alembic/versions/d9bb355538fd_add_is_read_to_leave_table.py
ADDED
@@ -0,0 +1,34 @@
+"""add is_read to leave table
+
+Revision ID: d9bb355538fd
+Revises: e95f62f91348
+Create Date: 2025-11-25 16:43:45.584602
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+import sqlmodel.sql.sqltypes
+
+
+# revision identifiers, used by Alembic.
+revision: str = 'd9bb355538fd'
+down_revision: Union[str, Sequence[str], None] = 'e95f62f91348'
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade():
+    op.add_column(
+        "leave",
+        sa.Column(
+            "is_read",
+            sa.Boolean(),
+            nullable=False,
+            server_default=sa.false()
+        )
+    )
+
+def downgrade():
+    op.drop_column("leave", "is_read")
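`server_default=sa.false()` is what lets this NOT NULL column land on a table that already has rows: existing rows are backfilled with `false` at DDL time. If the intent is for the application, not the database, to own the value afterward (an assumption about intent, not something this diff states), a common follow-up is to drop the default once the column exists:

# Optional: remove the server default after backfilling existing rows.
op.alter_column("leave", "is_read", server_default=None)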
alembic/versions/dd61202db14f_add_knowledgebase_chunk.py
ADDED
@@ -0,0 +1,33 @@
+"""add: knowledgebase,chunk
+
+Revision ID: dd61202db14f
+Revises: b33e3b5b7af9
+Create Date: 2025-11-17 23:28:11.537932
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+import sqlmodel.sql.sqltypes
+
+
+# revision identifiers, used by Alembic.
+revision: str = 'dd61202db14f'
+down_revision: Union[str, Sequence[str], None] = 'b33e3b5b7af9'
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    """Upgrade schema."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    pass
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    """Downgrade schema."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    pass
+    # ### end Alembic commands ###
alembic/versions/e8066533b622_delete_user_verification_cols.py
ADDED
@@ -0,0 +1,49 @@
+"""delete:user/verification cols
+
+Revision ID: e8066533b622
+Revises: 584a5111e60f
+Create Date: 2025-11-11 10:47:38.171691
+
+"""
+
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+import sqlmodel.sql.sqltypes
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision: str = "e8066533b622"
+down_revision: Union[str, Sequence[str], None] = "584a5111e60f"
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    """Upgrade schema."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_column("users", "verification_token")
+    op.drop_column("users", "verification_expires_at")
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    """Downgrade schema."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column(
+        "users",
+        sa.Column(
+            "verification_expires_at",
+            postgresql.TIMESTAMP(),
+            autoincrement=False,
+            nullable=True,
+        ),
+    )
+    op.add_column(
+        "users",
+        sa.Column(
+            "verification_token", sa.VARCHAR(), autoincrement=False, nullable=True
+        ),
+    )
+    # ### end Alembic commands ###
alembic/versions/e95f62f91348_added_leave_and_userdevice_table.py
ADDED
@@ -0,0 +1,33 @@
+"""added: leave and userdevice table
+
+Revision ID: e95f62f91348
+Revises: a3c79664f866
+Create Date: 2025-11-22 15:42:12.098237
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+import sqlmodel.sql.sqltypes
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision: str = 'e95f62f91348'
+down_revision: Union[str, Sequence[str], None] = 'a3c79664f866'
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    """Upgrade schema."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_column('users', 'device_tokens')
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    """Downgrade schema."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column('users', sa.Column('device_tokens', postgresql.ARRAY(sa.VARCHAR()), autoincrement=False, nullable=True))
+    # ### end Alembic commands ###
alembic/versions/e96769f268bc_fix_post_s_user_cascade.py
ADDED
@@ -0,0 +1,41 @@
+"""fix: post's user cascade
+
+Revision ID: e96769f268bc
+Revises: 5b5a3c7a6255
+Create Date: 2025-11-28 11:58:45.282329
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+import sqlmodel.sql.sqltypes
+
+
+# revision identifiers, used by Alembic.
+revision: str = 'e96769f268bc'
+down_revision: Union[str, Sequence[str], None] = '5b5a3c7a6255'
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    """Upgrade schema."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.alter_column('posts', 'user_id',
+               existing_type=sa.UUID(),
+               nullable=True)
+    op.drop_constraint(op.f('posts_user_id_fkey'), 'posts', type_='foreignkey')
+    op.create_foreign_key(None, 'posts', 'users', ['user_id'], ['id'], ondelete='SET NULL')
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    """Downgrade schema."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_constraint(None, 'posts', type_='foreignkey')
+    op.create_foreign_key(op.f('posts_user_id_fkey'), 'posts', 'users', ['user_id'], ['id'])
+    op.alter_column('posts', 'user_id',
+               existing_type=sa.UUID(),
+               nullable=False)
+    # ### end Alembic commands ###
alembic/versions/f6a1d6fc82d0_add_image_url_column_to_kb.py
ADDED
@@ -0,0 +1,33 @@
+"""add: image url column to kb
+
+Revision ID: f6a1d6fc82d0
+Revises: 1cbb7317540a
+Create Date: 2025-12-01 16:55:06.113217
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+import sqlmodel.sql.sqltypes
+
+
+# revision identifiers, used by Alembic.
+revision: str = 'f6a1d6fc82d0'
+down_revision: Union[str, Sequence[str], None] = '1cbb7317540a'
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    """Upgrade schema."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column('knowledge_chunk', sa.Column('image_url', sqlmodel.sql.sqltypes.AutoString(), nullable=True))
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    """Downgrade schema."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_column('knowledge_chunk', 'image_url')
+    # ### end Alembic commands ###
alembic/versions/fec3872d7eba_add_payslip_table.py
ADDED
@@ -0,0 +1,41 @@
+"""add:payslip table
+
+Revision ID: fec3872d7eba
+Revises: f6a1d6fc82d0
+Create Date: 2025-12-04 10:58:25.795533
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+import sqlmodel.sql.sqltypes
+
+
+# revision identifiers, used by Alembic.
+revision: str = 'fec3872d7eba'
+down_revision: Union[str, Sequence[str], None] = 'f6a1d6fc82d0'
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    """Upgrade schema."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_constraint(op.f('comments_post_id_fkey'), 'comments', type_='foreignkey')
+    op.drop_constraint(op.f('comments_user_id_fkey'), 'comments', type_='foreignkey')
+    op.create_foreign_key(None, 'comments', 'posts', ['post_id'], ['id'], ondelete='CASCADE')
+    op.create_foreign_key(None, 'comments', 'users', ['user_id'], ['id'], ondelete='CASCADE')
+    op.add_column('users', sa.Column('join_date', sqlmodel.sql.sqltypes.AutoString(), nullable=True))
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    """Downgrade schema."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_column('users', 'join_date')
+    op.drop_constraint(None, 'comments', type_='foreignkey')
+    op.drop_constraint(None, 'comments', type_='foreignkey')
+    op.create_foreign_key(op.f('comments_user_id_fkey'), 'comments', 'users', ['user_id'], ['id'])
+    op.create_foreign_key(op.f('comments_post_id_fkey'), 'comments', 'posts', ['post_id'], ['id'])
+    # ### end Alembic commands ###
queries/Query.sql
ADDED
File without changes
requirements.txt
ADDED
@@ -0,0 +1,80 @@
+aiosmtplib==5.0.0
+alembic==1.17.1
+annotated-doc==0.0.3
+annotated-types==0.7.0
+anyio==4.11.0
+asyncpg==0.30.0
+bcrypt==3.2.2
+cachetools==6.2.2
+certifi==2025.11.12
+cffi==2.0.0
+charset-normalizer==3.4.4
+click==8.3.0
+coloredlogs==15.0.1
+cryptography==46.0.3
+dnspython==2.8.0
+ecdsa==0.19.1
+email-validator==2.3.0
+fastapi==0.121.0
+filelock==3.20.0
+flatbuffers==25.9.23
+fsspec==2025.10.0
+google-api-core==2.28.1
+google-api-python-client==2.187.0
+google-auth==2.41.1
+google-auth-httplib2==0.2.1
+google-auth-oauthlib==1.2.3
+googleapis-common-protos==1.72.0
+greenlet==3.2.4
+h11==0.16.0
+hf-xet==1.2.0
+httpcore==1.0.9
+httplib2==0.31.0
+httpx==0.28.1
+huggingface-hub==0.36.0
+humanfriendly==10.0
+idna==3.11
+Mako==1.3.10
+MarkupSafe==3.0.3
+mpmath==1.3.0
+numpy==2.2.6
+oauthlib==3.3.1
+onnxruntime==1.23.2
+packaging==25.0
+passlib==1.7.4
+pgvector==0.4.1
+proto-plus==1.26.1
+protobuf==6.33.1
+psycopg2-binary==2.9.11
+pyasn1==0.6.1
+pyasn1_modules==0.4.2
+pycparser==2.23
+pydantic==2.12.4
+pydantic-settings==2.12.0
+pydantic_core==2.41.5
+pyparsing==3.2.5
+PyPDF2==3.0.1
+python-dateutil==2.9.0.post0
+python-dotenv==1.2.1
+python-jose==3.5.0
+python-multipart==0.0.20
+PyYAML==6.0.3
+regex==2025.11.3
+requests==2.32.5
+requests-oauthlib==2.0.0
+rsa==4.9.1
+safetensors==0.6.2
+six==1.17.0
+sniffio==1.3.1
+SQLAlchemy==2.0.44
+sqlmodel==0.0.27
+starlette==0.49.3
+sympy==1.14.0
+tokenizers==0.22.1
+tqdm==4.67.1
+transformers==4.57.1
+typing-inspection==0.4.2
+typing_extensions==4.15.0
+uritemplate==4.2.0
+urllib3==2.5.0
+uvicorn==0.38.0
src/auth/__init__.py
ADDED
File without changes
src/auth/config.py
ADDED
@@ -0,0 +1,17 @@
+import os
+from pydantic_settings import BaseSettings  # moved out of `pydantic` in v2, which requirements.txt pins
+from dotenv import load_dotenv
+
+
+class HomeSettings(BaseSettings):
+    FEATURE_ENABLED: bool = True
+
+
+home_settings = HomeSettings()
+
+
+load_dotenv()
+SECRET_KEY = os.getenv("SECRET_KEY")
+
+ALGORITHM = "HS256"
+ACCESS_TOKEN_EXPIRE_MINUTES = 60
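One ordering subtlety here: `HomeSettings()` is instantiated before `load_dotenv()` runs, and pydantic-settings reads the environment at instantiation, so a `FEATURE_ENABLED` entry in `.env` would be invisible to it. If that value is ever meant to come from `.env` (an assumption about intent), a minimal reordering sketch:

# Load .env into os.environ first, then build settings objects from it.
load_dotenv()

home_settings = HomeSettings()
SECRET_KEY = os.getenv("SECRET_KEY")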
src/auth/constants.py
ADDED
@@ -0,0 +1,2 @@
+WELCOME_MESSAGE = "Welcome to Yuvabe's Home Screen"
+EXIT_MESSAGE = "Thank you, have a wonderful day"
src/auth/dependencies.py
ADDED
File without changes
src/auth/exceptions.py
ADDED
File without changes
src/auth/feed_db_script.py
ADDED
@@ -0,0 +1,132 @@
+from src.auth.utils import hash_password
+from datetime import date
+from sqlmodel import Session
+
+from src.core.database import engine
+from src.core.models import Users, Teams, Roles, UserTeamsRole
+
+
+# ------------------------
+# 1. Seed Users
+# ------------------------
+def seed_users(session: Session):
+    users = [
+        Users(
+            email_id="[email protected]",
+            password=hash_password("Yuvabe"),
+            user_name="ragul",
+            dob=date(2001, 5, 21),
+            address="Chennai",
+            profile_picture="ragul.png",
+        ),
+        Users(
+            email_id="[email protected]",
+            password=hash_password("Yuvabe"),
+            user_name="Shri",
+            dob=date(1999, 3, 14),
+            address="Chennai",
+            profile_picture="shri.png",
+        ),
+        Users(
+            email_id="[email protected]",
+            password=hash_password("Yuvabe"),
+            user_name="Sathish",
+            dob=date(1998, 7, 10),
+            address="Chennai",
+            profile_picture="Sathish.png",
+        ),
+        Users(
+            email_id="[email protected]",
+            password=hash_password("Yuvabe"),
+            user_name="Deepika",
+            dob=date(1997, 2, 5),
+            address="Chennai",
+            profile_picture="deepika.png",
+        ),
+    ]
+
+    session.add_all(users)
+    session.commit()
+    print("Users added.")
+    return users
+
+
+# ------------------------
+# 2. Seed Teams
+# ------------------------
+def seed_teams(session: Session):
+    teams = [
+        Teams(name="Tech Team"),
+        Teams(name="HR Team"),
+    ]
+    session.add_all(teams)
+    session.commit()
+    print("Teams added.")
+    return teams
+
+
+# ------------------------
+# 3. Seed Roles
+# ------------------------
+def seed_roles(session: Session):
+    roles = [
+        Roles(name="Developer"),
+        Roles(name="Team Lead"),
+        Roles(name="HR Manager"),
+    ]
+    session.add_all(roles)
+    session.commit()
+    print("Roles added.")
+    return roles
+
+
+# ------------------------
+# 4. Map Users → Teams → Roles
+# ------------------------
+def seed_user_teams_roles(session: Session, users, teams, roles):
+    mappings = [
+        # Hari → Tech Team → Developer
+        UserTeamsRole(
+            user_id=users[0].id,  # Hari
+            team_id=teams[0].id,  # Tech Team
+            role_id=roles[0].id,  # Developer
+        ),
+        # Shri → Tech Team → Team Lead
+        UserTeamsRole(
+            user_id=users[1].id,  # Shri
+            team_id=teams[0].id,  # Tech Team
+            role_id=roles[1].id,  # Team Lead
+        ),
+        # HR Keerthana
+        UserTeamsRole(
+            user_id=users[2].id,  # Keerthana
+            team_id=teams[1].id,  # HR Team
+            role_id=roles[2].id,  # HR Manager
+        ),
+        # HR Deepika
+        UserTeamsRole(
+            user_id=users[3].id,  # Deepika
+            team_id=teams[1].id,  # HR Team
+            role_id=roles[2].id,  # HR Manager
+        ),
+    ]
+
+    session.add_all(mappings)
+    session.commit()
+    print("User-Team-Role mappings added.")
+
+
+# ------------------------
+# 5. Master Runner
+# ------------------------
+def run_all_seeds():
+    with Session(engine) as session:
+        users = seed_users(session)
+        teams = seed_teams(session)
+        roles = seed_roles(session)
+        seed_user_teams_roles(session, users, teams, roles)
+        print("All data seeded successfully!")
+
+
+if __name__ == "__main__":
+    run_all_seeds()
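The seeder guards on __main__, so it can be run directly; a minimal sketch, assuming the repo root is the working directory and the Postgres env vars behind src.core.database.engine are set:

# equivalent to: python -m src.auth.feed_db_script
from src.auth.feed_db_script import run_all_seeds

run_all_seeds()  # seeds users, teams, roles, then the user-team-role mappings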
src/auth/models.py
ADDED
@@ -0,0 +1,2 @@
+import uuid
+import sqlmodel
src/auth/router.py
ADDED
@@ -0,0 +1,147 @@
+import uuid
+from src.core.database import get_async_session
+from fastapi import APIRouter, Depends, HTTPException, status
+from jose import jwt, JWTError
+from sqlmodel import Session
+from sqlmodel.ext.asyncio.session import AsyncSession
+from src.auth.service import (
+    create_user,
+    verify_email,
+    login_user,
+)
+from src.auth.utils import get_current_user
+from src.core.models import Users, Roles, UserTeamsRole
+from sqlmodel import select
+from src.core.config import settings
+from fastapi.responses import RedirectResponse
+from .schemas import SignUpRequest, LoginRequest, BaseResponse, SendVerificationRequest
+from fastapi.security import OAuth2PasswordRequestForm
+from src.auth.utils import create_access_token
+
+
+router = APIRouter(prefix="/auth", tags=["Auth"])
+
+
+@router.post("/signup", response_model=BaseResponse)
+async def signup(
+    payload: SignUpRequest, session: AsyncSession = Depends(get_async_session)
+):
+    try:
+        response = await create_user(
+            session, payload.name, payload.email, payload.password
+        )
+        return {"code": 200, "data": response}
+    except ValueError as e:
+        raise HTTPException(status_code=400, detail=str(e))
+
+
+# @router.post("/send-verification", response_model=BaseResponse)
+# async def send_verification(
+#     payload: SendVerificationRequest, session: AsyncSession = Depends(get_async_session)
+# ):
+#     if not payload.email:
+#         raise HTTPException(status_code=400, detail="Email is required")
+
+#     response = await send_verification_link(session, payload.email)
+#     return {"code": 200, "data": response}
+
+
+# @router.get("/verify-email", response_model=BaseResponse)
+# async def verify_email_route(
+#     token: str, session: AsyncSession = Depends(get_async_session)
+# ):
+#     response = await verify_email(session, token)
+#     access_token = response["access_token"]
+#     redirect_url = f"yuvabe://verified?token={access_token}"
+
+#     return RedirectResponse(url=redirect_url)
+
+
+@router.post("/login", response_model=BaseResponse)
+async def login(
+    payload: LoginRequest, session: AsyncSession = Depends(get_async_session)
+):
+    response = await login_user(session, payload.email, payload.password)
+
+    user_id = response["user"]["id"]
+
+    # 🔥 Fetch User Role
+    result = await session.exec(
+        select(Roles)
+        .join(UserTeamsRole, UserTeamsRole.role_id == Roles.id)
+        .where(UserTeamsRole.user_id == uuid.UUID(user_id))
+    )
+
+    role_obj = result.first()
+    role_name = role_obj.name if role_obj else "Member"
+
+    response["user"]["role"] = (role_name or "member").lower()
+
+    return {"code": 200, "data": response}
+
+
+@router.post("/refresh", response_model=BaseResponse)
+async def refresh_token(request: dict):
+    """Generate new access token using refresh token"""
+    refresh_token = request.get("refresh_token")
+    if not refresh_token:
+        raise HTTPException(status_code=400, detail="Refresh token is required")
+
+    try:
+        payload = jwt.decode(
+            refresh_token, settings.SECRET_KEY, algorithms=[settings.JWT_ALGORITHM]
+        )
+        if payload.get("type") != "refresh":
+            raise HTTPException(status_code=400, detail="Invalid refresh token")
+
+        user_data = {
+            "sub": payload["sub"],
+            "name": payload.get("name"),
+            "email": payload.get("email"),
+        }
+        new_access_token = create_access_token(data=user_data)
+        return {"code": 200, "data": {"access_token": new_access_token}}
+
+    except JWTError:
+        raise HTTPException(status_code=401, detail="Invalid or expired refresh token")
+
+
+@router.get("/home", response_model=BaseResponse)
+async def get_home(
+    user_id: str = Depends(get_current_user),
+    session: AsyncSession = Depends(get_async_session),
+):
+    """
+    Protected home endpoint. Requires a valid access token (Bearer).
+    """
+    user = await session.get(Users, uuid.UUID(user_id))
+    if not user:
+        raise HTTPException(status_code=404, detail="User not found")
+
+    role_join = await session.exec(
+        select(Roles.name)
+        .join(UserTeamsRole, UserTeamsRole.role_id == Roles.id)
+        .where(UserTeamsRole.user_id == uuid.UUID(user_id))
+    )
+    user_role = role_join.first() or "Member"
+
+    # Example payload; replace with your real app data
+    return {
+        "code": 200,
+        "data": {
+            "message": f"Welcome to Home, {user.user_name}!",
+            "user": {
+                "id": str(user.id),
+                "name": user.user_name,
+                "email": user.email_id,
+                "is_verified": user.is_verified,
+                "dob": user.dob.isoformat() if user.dob else None,
+                "profile_picture": user.profile_picture,
+                "role": user_role.lower(),
+            },
+            "home_data": {
+                "announcements": ["Welcome!", "New protocol released"],
+                "timestamp": user.created_at.isoformat() if user.created_at else None,
+            },
+        },
+    }
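A hedged sketch of exercising the /auth/refresh endpoint above using httpx (already pinned in requirements.txt); the base URL and token value are placeholders, not project constants:

import httpx

BASE_URL = "http://localhost:8000"  # placeholder; substitute your deployment URL
refresh_token = "<refresh_token returned by /auth/login>"

resp = httpx.post(f"{BASE_URL}/auth/refresh", json={"refresh_token": refresh_token})
print(resp.json())  # {"code": 200, "data": {"access_token": "..."}} on success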
src/auth/schemas.py
ADDED
@@ -0,0 +1,38 @@
+from pydantic import BaseModel, EmailStr
+from typing import Optional, Union, Dict
+
+
+class SignUpRequest(BaseModel):
+    name: str
+    email: str
+    password: str
+
+
+class VerifyOtpRequest(BaseModel):
+    email: str
+    otp: str
+
+
+class LoginRequest(BaseModel):
+    email: str
+    password: str
+
+class SendVerificationRequest(BaseModel):
+    email: EmailStr
+
+
+class UserResponse(BaseModel):
+    id: str
+    name: str
+    email: str
+
+
+class LoginResponseData(BaseModel):
+    access_token: str
+    token_type: str
+    user: UserResponse
+
+
+class BaseResponse(BaseModel):
+    code: int
+    data: Optional[Union[Dict, str]] = None
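A quick sketch of BaseResponse serialization under pydantic v2 (model_dump is the v2 replacement for .dict()); the import assumes the repo root is on sys.path:

from src.auth.schemas import BaseResponse

resp = BaseResponse(code=200, data={"message": "ok"})
print(resp.model_dump())  # {'code': 200, 'data': {'message': 'ok'}}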
src/auth/service.py
ADDED
@@ -0,0 +1,156 @@
+import uuid
+from src.auth.utils import (
+    # send_otp_email,
+    verify_password,
+    create_refresh_token,
+    verify_verification_token,
+    create_access_token,
+    hash_password,
+    create_verification_token,
+)
+from src.core.models import Users
+from sqlmodel import Session, select
+from fastapi import HTTPException
+from sqlmodel.ext.asyncio.session import AsyncSession
+
+
+async def create_user(session: AsyncSession, name: str, email: str, password: str):
+    """Create user without sending email"""
+
+    if not email.lower().endswith("@yuvabe.com"):
+        raise HTTPException(status_code=400, detail="Enter your Yuvabe email ID")
+
+    user = await session.exec(select(Users).where(Users.email_id == email))
+    existing_user = user.first()
+    if existing_user:
+        raise ValueError("User already exists")
+
+    new_user = Users(
+        user_name=name,
+        email_id=email,
+        password=hash_password(password),
+        is_verified=True,
+    )
+
+    session.add(new_user)
+    await session.commit()
+    await session.refresh(new_user)
+
+    access_token = create_access_token(
+        data={
+            "sub": str(new_user.id),
+            "name": new_user.user_name,
+            "email": new_user.email_id,
+        }
+    )
+
+    refresh_token = create_refresh_token(
+        data={
+            "sub": str(new_user.id),
+            "name": new_user.user_name,
+            "email": new_user.email_id,
+        }
+    )
+
+    return {
+        "message": "User created successfully",
+        "user_id": str(new_user.id),
+        "access_token": access_token,
+        "refresh_token": refresh_token,
+    }
+
+
+# async def send_verification_link(session: Session, email: str):
+#     """Send verification email for an existing user."""
+#     result = await session.exec(select(Users).where(Users.email_id == email))
+#     user = result.first()
+
+#     if not user:
+#         raise HTTPException(status_code=404, detail="User not found")
+
+#     if user.is_verified:
+#         raise HTTPException(status_code=400, detail="User is already verified")
+
+#     # Create a token using existing user ID (opaque token)
+#     token = create_verification_token(str(user.id))
+
+#     try:
+#         send_verification_email(email, token)
+#     except Exception as e:
+#         raise HTTPException(
+#             status_code=500, detail=f"Failed to send verification email: {str(e)}"
+#         )
+
+#     return {
+#         "message": "Verification link sent successfully",
+#         "user_id": str(user.id),
+#         "email": user.email_id,
+#     }
+
+
+async def verify_email(session: AsyncSession, token: str):
+    try:
+        user_id = await verify_verification_token(token)
+    except ValueError as e:
+        raise HTTPException(status_code=400, detail=str(e))
+
+    user = await session.get(Users, uuid.UUID(user_id))
+    if not user:
+        raise HTTPException(status_code=404, detail="User not found")
+
+    if not user.is_verified:
+        user.is_verified = True
+        await session.commit()
+
+    access_token = create_access_token(
+        data={"sub": str(user.id), "name": user.user_name, "email": user.email_id}
+    )
+
+    refresh_token = create_refresh_token(
+        data={"sub": str(user.id), "name": user.user_name, "email": user.email_id}
+    )
+
+    return {
+        "message": "Email verified successfully!",
+        "access_token": access_token,
+        "refresh_token": refresh_token,
+        "token_type": "bearer",
+    }
+
+
+async def login_user(session: AsyncSession, email: str, password: str):
+
+    if not email.lower().endswith("@yuvabe.com"):
+        raise HTTPException(status_code=400, detail="Enter your Yuvabe email ID")
+
+    users = await session.exec(select(Users).where(Users.email_id == email))
+    user = users.first()
+
+    if not user:
+        raise HTTPException(status_code=400, detail="Invalid email or password")
+
+    if not verify_password(password, user.password):
+        raise HTTPException(status_code=400, detail="Invalid email or password")
+
+    if not user.is_verified:
+        raise HTTPException(status_code=400, detail="Verify email to login")
+
+    access_token = create_access_token(
+        data={"sub": str(user.id), "name": user.user_name, "email": user.email_id}
+    )
+
+    refresh_token = create_refresh_token(
+        data={"sub": str(user.id), "name": user.user_name, "email": user.email_id}
+    )
+
+    return {
+        "access_token": access_token,
+        "refresh_token": refresh_token,
+        "token_type": "bearer",
+        "user": {
+            "id": str(user.id),
+            "name": user.user_name,
+            "email": user.email_id,
+            "is_verified": user.is_verified,
+        },
+    }
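The domain guard in create_user and login_user is a plain suffix check; a minimal illustration of its behaviour (the helper name here is ours, not part of the codebase):

def is_company_email(email: str) -> bool:
    # mirrors the `email.lower().endswith("@yuvabe.com")` guard used above
    return email.lower().endswith("@yuvabe.com")

assert is_company_email("Someone@Yuvabe.com")       # case-insensitive match
assert not is_company_email("someone@example.com")  # rejected with a 400 above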
src/auth/utils.py
ADDED
@@ -0,0 +1,206 @@
+import json
+import smtplib
+import os
+import uuid
+from email.mime.text import MIMEText
+import logging
+import traceback
+from passlib.context import CryptContext
+from src.core.database import get_async_session
+from sqlmodel.ext.asyncio.session import AsyncSession
+from jose import jwt, JWTError
+from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
+from datetime import datetime, timedelta
+from cryptography.fernet import Fernet, InvalidToken
+from fastapi import Depends, HTTPException, status
+from src.core.models import Users
+from src.core.config import settings
+
+
+SECRET_KEY = settings.SECRET_KEY
+ALGORITHM = settings.JWT_ALGORITHM
+ACCESS_TOKEN_EXPIRE_MINUTES = settings.JWT_EXPIRE
+logger = logging.getLogger(__name__)
+
+SMTP_SERVER = settings.EMAIL_SERVER
+SMTP_PORT = settings.EMAIL_PORT
+SMTP_EMAIL = settings.EMAIL_USERNAME
+SMTP_PASSWORD = settings.EMAIL_PASSWORD
+
+FERNET_KEY = settings.FERNET_KEY
+VERIFICATION_BASE_URL = settings.VERIFICATION_BASE_URL
+
+
+pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
+
+
+def hash_password(password: str) -> str:
+    """Encrypt plain password into hashed password"""
+    return pwd_context.hash(password)
+
+
+def verify_password(plain_password: str, hashed_password: str) -> bool:
+    """Compare plain password with stored hash"""
+    return pwd_context.verify(plain_password, hashed_password)
+
+
+def create_access_token(data: dict):
+    """Create JWT token with expiry"""
+    to_encode = data.copy()
+    expire = datetime.utcnow() + timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES)
+    to_encode.update({"exp": expire})
+    encoded_jwt = jwt.encode(to_encode, SECRET_KEY, algorithm=ALGORITHM)
+    return encoded_jwt
+
+
+# def send_verification_email(to_email: str, token: str):
+#     """Send verification email using smtplib with detailed debug logs."""
+#     subject = f"Verify your {settings.APP_NAME} Account"
+#     verification_link = f"{VERIFICATION_BASE_URL}/auth/verify-email?token={token}"
+
+#     body = f"""
+#     Hi,
+
+#     Please verify your {settings.APP_NAME} account by clicking the link below:
+#     {verification_link}
+
+#     This link will expire in 24 hours.
+
+#     Regards,
+#     {settings.APP_NAME} Team
+#     """
+
+#     msg = MIMEText(body)
+#     msg["Subject"] = subject
+#     msg["From"] = SMTP_EMAIL
+#     msg["To"] = to_email
+
+#     logger.info("🟢 Starting send_verification_email()")
+#     logger.info(f"📨 To: {to_email}")
+#     logger.info(f"📤 SMTP Server: {SMTP_SERVER}:{SMTP_PORT}")
+
+#     try:
+#         logger.info("🔌 Connecting to SMTP server...")
+#         with smtplib.SMTP(SMTP_SERVER, SMTP_PORT, timeout=30) as server:
+#             logger.info("✅ Connected successfully.")
+
+#             logger.info("🔒 Starting TLS...")
+#             server.starttls()
+#             logger.info("✅ TLS secured.")
+
+#             logger.info("🔑 Logging in to SMTP server...")
+#             server.login(SMTP_EMAIL, SMTP_PASSWORD)
+#             logger.info("✅ Logged in successfully.")
+
+#             # Send email
+#             logger.info("📧 Sending email message...")
+#             server.send_message(msg)
+#             logger.info(f"✅ Email successfully sent to {to_email}")
+
+#     except smtplib.SMTPAuthenticationError as e:
+#         logger.error("❌ Authentication failed — check email or app password.")
+#         logger.error(f"Error details: {e}")
+#         logger.error(traceback.format_exc())
+#         raise
+#     except smtplib.SMTPConnectError as e:
+#         logger.error("❌ Could not connect to SMTP server.")
+#         logger.error(f"Error details: {e}")
+#         logger.error(traceback.format_exc())
+#         raise
+#     except smtplib.SMTPRecipientsRefused as e:
+#         logger.error("❌ Recipient address refused.")
+#         logger.error(f"Error details: {e}")
+#         logger.error(traceback.format_exc())
+#         raise
+#     except smtplib.SMTPException as e:
+#         logger.error("❌ General SMTP error occurred.")
+#         logger.error(f"Error details: {e}")
+#         logger.error(traceback.format_exc())
+#         raise
+#     except Exception as e:
+#         logger.error("❌ Unknown error occurred while sending verification email.")
+#         logger.error(f"Error details: {e}")
+#         logger.error(traceback.format_exc())
+#         raise
+
+
+fernet = Fernet(FERNET_KEY.encode())
+
+
+def create_verification_token(user_id: str, expires_in_hours: int = 24) -> str:
+    """Create encrypted token with expiry"""
+    payload = {
+        "sub": user_id,
+        "exp": (datetime.utcnow() + timedelta(hours=expires_in_hours)).timestamp(),
+    }
+    token = fernet.encrypt(json.dumps(payload).encode()).decode()
+    return token
+
+
+async def verify_verification_token(token: str) -> str:
+    """Verify encrypted token and extract user_id"""
+    try:
+        decrypted = fernet.decrypt(token.encode())
+        data = json.loads(decrypted.decode())
+
+        exp = datetime.fromtimestamp(data["exp"])
+        if datetime.utcnow() > exp:
+            raise ValueError("Verification link expired")
+
+        return data["sub"]
+
+    except InvalidToken:
+        raise ValueError("Invalid verification link")
+
+
+bearer_scheme = HTTPBearer()
+
+
+def get_current_user(
+    credentials: HTTPAuthorizationCredentials = Depends(bearer_scheme),
+):
+    """Decode JWT token and extract current user ID"""
+    token = credentials.credentials
+
+    try:
+        payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
+        user_id: str = payload.get("sub")
+
+        if user_id is None:
+            raise HTTPException(
+                status_code=status.HTTP_401_UNAUTHORIZED,
+                detail="Invalid token: Missing user id",
+            )
+        return user_id
+
+    except JWTError:
+        raise HTTPException(
+            status_code=status.HTTP_401_UNAUTHORIZED,
+            detail="Invalid or expired token",
+        )
+
+
+async def get_current_active_user(
+    session: AsyncSession = Depends(get_async_session),
+    user_id: str = Depends(get_current_user),
+) -> Users:
+    """Return the full user model for the currently authenticated user."""
+    user = await session.get(Users, uuid.UUID(user_id))
+    if not user:
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND, detail="User not found"
+        )
+    if not user.is_verified:
+        raise HTTPException(
+            status_code=status.HTTP_403_FORBIDDEN, detail="User not verified"
+        )
+    return user
+
+
+def create_refresh_token(data: dict, expires_days: int = 7):
+    """Create a long-lived JWT refresh token"""
+    to_encode = data.copy()
+    expire = datetime.utcnow() + timedelta(days=expires_days)
+    to_encode.update({"exp": expire, "type": "refresh"})
+    encoded_jwt = jwt.encode(to_encode, SECRET_KEY, algorithm=ALGORITHM)
+    return encoded_jwt
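For orientation, a self-contained sketch of the HS256 round trip that create_access_token and get_current_user implement, using python-jose directly; SECRET here is a stand-in for settings.SECRET_KEY:

from datetime import datetime, timedelta
from jose import jwt

SECRET = "change-me"  # stand-in; the real code reads settings.SECRET_KEY
claims = {"sub": "some-user-uuid", "exp": datetime.utcnow() + timedelta(minutes=60)}
token = jwt.encode(claims, SECRET, algorithm="HS256")

decoded = jwt.decode(token, SECRET, algorithms=["HS256"])  # raises JWTError if tampered/expired
assert decoded["sub"] == "some-user-uuid"  # the claim get_current_user extracts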
src/chatbot/__init__.py
ADDED
File without changes
src/chatbot/config.py
ADDED
@@ -0,0 +1,6 @@
+from pydantic_settings import BaseSettings  # pydantic v2: BaseSettings lives in pydantic-settings
+
+class HomeSettings(BaseSettings):
+    FEATURE_ENABLED: bool = True
+
+home_settings = HomeSettings()
src/chatbot/constants.py
ADDED
@@ -0,0 +1,2 @@
+WELCOME_MESSAGE = "Welcome to Yuvabe's Home Screen"
+EXIT_MESSAGE = "Thank you, have a wonderful day"
src/chatbot/dependencies.py
ADDED
File without changes
src/chatbot/embedding.py
ADDED
@@ -0,0 +1,71 @@
+import os
+import numpy as np
+from typing import List
+import onnxruntime as ort
+from transformers import AutoTokenizer
+from huggingface_hub import hf_hub_download
+
+MODEL_ID = "onnx-community/embeddinggemma-300m-ONNX"
+
+class EmbeddingModel:
+    def __init__(self):
+        print("Loading tokenizer…")
+        self.tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
+
+        print("Downloading ONNX model files…")
+
+        self.model_path = hf_hub_download(
+            repo_id=MODEL_ID,
+            filename="onnx/model.onnx"
+        )
+        self.data_path = hf_hub_download(  # external weights; must sit next to model.onnx
+            repo_id=MODEL_ID,
+            filename="onnx/model.onnx_data"
+        )
+
+        model_dir = os.path.dirname(self.model_path)  # both files land in the same cache dir
+
+        print("Creating inference session…")
+        self.session = ort.InferenceSession(
+            self.model_path,
+            providers=["CPUExecutionProvider"],
+        )
+
+        self.input_names = [i.name for i in self.session.get_inputs()]
+        self.output_names = [o.name for o in self.session.get_outputs()]
+
+    async def embed_text(self, text: str, max_length=512) -> List[float]:
+
+        encoded = self.tokenizer(
+            text,
+            truncation=True,
+            padding=True,
+            max_length=max_length,
+            return_tensors="np",
+        )
+
+        input_ids = encoded["input_ids"].astype(np.int64)
+        attention_mask = encoded["attention_mask"].astype(np.int64)
+
+        outputs = self.session.run(
+            self.output_names,
+            {
+                self.input_names[0]: input_ids,
+                self.input_names[1]: attention_mask,
+            },
+        )
+        last_hidden = outputs[0]
+
+        mask = attention_mask[..., None]
+        pooled = (last_hidden * mask).sum(axis=1) / mask.sum(axis=1)  # masked mean pooling
+
+        vec = pooled[0]
+
+        norm = np.linalg.norm(vec)
+        if norm > 0:
+            vec = vec / norm  # L2-normalize so inner product behaves like cosine similarity
+
+        return vec.tolist()
+
+
+embedding_model = EmbeddingModel()
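The pooling step above is a masked mean over token embeddings followed by L2 normalization; a self-contained numpy sketch of the same arithmetic with toy shapes:

import numpy as np

last_hidden = np.random.rand(1, 4, 768)   # (batch, tokens, hidden) stand-in for the ONNX output
attention_mask = np.array([[1, 1, 1, 0]]) # last token is padding
mask = attention_mask[..., None]

pooled = (last_hidden * mask).sum(axis=1) / mask.sum(axis=1)  # padding contributes nothing
vec = pooled[0] / np.linalg.norm(pooled[0])                   # unit length, as embed_text returns
assert np.isclose(np.linalg.norm(vec), 1.0)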
src/chatbot/exceptions.py
ADDED
File without changes
src/chatbot/models.py
ADDED
@@ -0,0 +1,36 @@
+import uuid
+from datetime import datetime
+from typing import List
+
+from pgvector.sqlalchemy import Vector
+from sqlalchemy import Column
+from sqlmodel import Field, Relationship, SQLModel, ForeignKey
+
+from sqlalchemy.dialects.postgresql import UUID
+
+
+class KnowledgeBase(SQLModel, table=True):
+    __tablename__ = "knowledge_base"
+    id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True)
+    name: str = Field(nullable=False)
+    description: str | None = None
+    created_at: datetime = Field(default_factory=datetime.now)
+    knowledge_chunk: List["KnowledgeChunk"] = Relationship(
+        back_populates="knowledge_base"
+    )
+
+
+class KnowledgeChunk(SQLModel, table=True):
+    __tablename__ = "knowledge_chunk"
+    id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True)
+    kb_id: uuid.UUID = Field(
+        sa_column=Column(UUID(as_uuid=True),
+            ForeignKey("knowledge_base.id", ondelete="CASCADE"),
+            nullable=False,
+        )
+    )
+    chunk_index: int
+    chunk_text: str
+    image_url: str | None = Field(default=None)
+    embedding: List[float] = Field(sa_column=Column(Vector(768)))
+    knowledge_base: "KnowledgeBase" = Relationship(back_populates="knowledge_chunk")
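A minimal sketch of constructing these models in memory (no database session involved); the 768-zero embedding is a placeholder matching Vector(768), and the import assumes the repo root is on sys.path:

import uuid
from src.chatbot.models import KnowledgeBase, KnowledgeChunk

kb = KnowledgeBase(name="handbook", description="demo")  # id filled by default_factory
chunk = KnowledgeChunk(
    kb_id=kb.id, chunk_index=0, chunk_text="hello", embedding=[0.0] * 768
)
# The ondelete="CASCADE" FK means dropping a knowledge_base row removes its chunks.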
src/chatbot/router.py
ADDED
@@ -0,0 +1,159 @@
+import os
+import shutil
+import tempfile
+from typing import Optional
+
+from fastapi import APIRouter, Depends, File, Form, HTTPException, UploadFile
+from sqlalchemy import text
+from sqlmodel.ext.asyncio.session import AsyncSession
+
+from src.core.database import get_async_session
+from .schemas import ManualTextRequest
+from .service import store_manual_text
+from .embedding import embedding_model
+from .schemas import (
+    SemanticSearchRequest,
+    SemanticSearchResult,
+    TokenizeRequest,
+    TokenizeResponse,
+    UploadKBResponse,
+)
+from .service import process_pdf_and_store
+
+router = APIRouter(prefix="/chatbot", tags=["chatbot"])
+
+@router.post("/tokenize", response_model=TokenizeResponse)
+async def tokenize_text(payload: TokenizeRequest):
+    try:
+        encoded = embedding_model.tokenizer(
+            payload.text,
+            return_tensors="np",
+            truncation=True,
+            padding="longest",
+            max_length=512,
+        )
+
+        return TokenizeResponse(
+            input_ids=encoded["input_ids"][0].tolist(),
+            attention_mask=encoded["attention_mask"][0].tolist(),
+        )
+
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=str(e))
+
+
+@router.post("/semantic-search", response_model=list[SemanticSearchResult])
+async def semantic_search(
+    payload: SemanticSearchRequest, session: AsyncSession = Depends(get_async_session)
+):
+
+    if len(payload.embedding) == 0:
+        raise HTTPException(status_code=400, detail="Embedding cannot be empty.")
+
+    q_vector = payload.embedding
+    top_k = payload.top_k or 3
+
+    q_vector_str = "[" + ",".join(str(x) for x in q_vector) + "]"
+
+    sql = text(
+        """
+        SELECT id, kb_id, chunk_text, image_url,
+               embedding <#> :query_vec AS score
+        FROM knowledge_chunk
+        ORDER BY embedding <#> :query_vec ASC
+        LIMIT :top_k
+        """
+    )
+
+    result = await session.execute(
+        sql, {"query_vec": q_vector_str, "top_k": top_k}
+    )
+    rows = result.fetchall()
+
+    return [
+        SemanticSearchResult(
+            chunk_id=str(r.id),
+            kb_id=str(r.kb_id),
+            text=r.chunk_text,
+            image_url=r.image_url,
+            score=float(r.score),
+        )
+        for r in rows
+    ]
+
+# Before hitting this endpoint, make sure model.data & model.onnx_data are available in the asset/onnx folder
+# @router.post("/upload-pdf", response_model=UploadKBResponse)
+# async def upload_pdf(
+#     file: UploadFile = File(...),
+#     name: str = Form(...),
+#     description: Optional[str] = Form(None),
+#     session: AsyncSession = Depends(get_async_session),
+# ):
+#     if not file.filename.endswith(".pdf"):
+#         raise HTTPException(
+#             status_code=400, detail="Only PDF files are supported for now."
+#         )
+
+#     tmp_dir = tempfile.mkdtemp()
+#     tmp_path = os.path.join(tmp_dir, file.filename)
+#     try:
+#         with open(tmp_path, "wb") as out_f:
+#             shutil.copyfileobj(file.file, out_f)
+
+#         with open(tmp_path, "rb") as fobj:
+#             result = await process_pdf_and_store(fobj, name, description, session)
+
+#         return UploadKBResponse(
+#             kb_id=result["kb_id"],
+#             name=result["name"],
+#             chunks_stored=result["chunks_stored"],
+#         )
+#     finally:
+#         try:
+#             os.remove(tmp_path)
+#             os.rmdir(tmp_dir)
+#         except Exception:
+#             pass
+
+# @router.post("/manual-add-chunk")
+# async def manual_add_chunk(
+#     payload: ManualTextRequest,
+#     session: AsyncSession = Depends(get_async_session)
+# ):
+#     return await store_manual_text(
+#         kb_id=payload.kb_id,
+#         text=payload.text,
+#         session=session
+#     )
+
+# @router.post("/test-semantic", response_model=list[SemanticSearchResult])
+# async def test_semantic(
+#     query: str,
+#     top_k: int = 3,
+#     session: AsyncSession = Depends(get_async_session)
+# ):
+
+#     embedding = await embedding_model.embed_text(query)
+
+#     q_vec = "[" + ",".join(map(str, embedding)) + "]"
+
+#     sql = text("""
+#         SELECT id, kb_id, chunk_text,
+#             embedding <#> :vec AS score
+#         FROM knowledge_chunk
+#         ORDER BY embedding <#> :vec ASC
+#         LIMIT :k
+#     """)
+
+#     result = await session.execute(sql, {"vec": q_vec, "k": top_k})
+#     rows = result.fetchall()
+
+#     return [
+#         SemanticSearchResult(
+#             chunk_id=str(r.id),
+#             kb_id=str(r.kb_id),
+#             text=r.chunk_text,
+#             score=float(r.score),
+#         )
+#         for r in rows
+#     ]
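Worth noting for readers of the semantic-search SQL: pgvector's <#> operator evaluates to the negative inner product, so ordering ascending surfaces the most similar rows first, and because embed_text L2-normalizes its output this ranking coincides with cosine similarity. A self-contained numpy check of that relationship:

import numpy as np

a = np.random.rand(768); a /= np.linalg.norm(a)
b = np.random.rand(768); b /= np.linalg.norm(b)

neg_inner = -float(a @ b)  # what `embedding <#> :query_vec` computes
cosine = float(a @ b)      # cosine similarity, since both vectors are unit length
assert np.isclose(neg_inner, -cosine)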
src/chatbot/schemas.py
ADDED
@@ -0,0 +1,41 @@
+import uuid
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+
+class UploadKBResponse(BaseModel):
+    kb_id: uuid.UUID
+    name: str
+    chunks_stored: int
+
+
+class UploadKBRequest(BaseModel):
+    name: str
+    description: Optional[str] = None
+
+
+class TokenizeRequest(BaseModel):
+    text: str
+
+
+class TokenizeResponse(BaseModel):
+    input_ids: List[int]
+    attention_mask: List[int]
+
+
+class SemanticSearchRequest(BaseModel):
+    embedding: List[float]
+    top_k: Optional[int] = 3
+
+
+class SemanticSearchResult(BaseModel):
+    chunk_id: str
+    kb_id: str
+    text: str
+    image_url: str | None = None
+    score: float
+
+class ManualTextRequest(BaseModel):
+    kb_id: uuid.UUID
+    text: str
ADDED
|
@@ -0,0 +1,71 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
from uuid import UUID
|
| 3 |
+
from sqlmodel.ext.asyncio.session import AsyncSession
|
| 4 |
+
from sqlmodel import select
|
| 5 |
+
from .embedding import embedding_model
|
| 6 |
+
from .models import KnowledgeBase, KnowledgeChunk
|
| 7 |
+
from .utils import (
|
| 8 |
+
chunk_sentences_with_overlap,
|
| 9 |
+
extract_text_from_pdf_fileobj,
|
| 10 |
+
split_into_sentences,
|
| 11 |
+
)
|
| 12 |
+
|
| 13 |
+
DEFAULT_MAX_WORDS = int(os.getenv("CHUNK_MAX_WORDS", "200"))
|
| 14 |
+
DEFAULT_OVERLAP = int(os.getenv("CHUNK_OVERLAP_WORDS", "40"))
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
async def process_pdf_and_store(
|
| 18 |
+
fileobj, kb_name: str, kb_description: str | None, session: AsyncSession
|
| 19 |
+
):
|
| 20 |
+
raw_text = extract_text_from_pdf_fileobj(fileobj)
|
| 21 |
+
|
| 22 |
+
sentences = split_into_sentences(raw_text)
|
| 23 |
+
|
| 24 |
+
chunks = chunk_sentences_with_overlap(
|
| 25 |
+
sentences, max_words=DEFAULT_MAX_WORDS, overlap_words=DEFAULT_OVERLAP
|
| 26 |
+
)
|
| 27 |
+
|
| 28 |
+
kb = KnowledgeBase(name=kb_name, description=kb_description)
|
| 29 |
+
session.add(kb)
|
| 30 |
+
await session.commit()
|
| 31 |
+
await session.refresh(kb)
|
| 32 |
+
|
| 33 |
+
chunk_objs = []
|
| 34 |
+
for idx, chunk_text in enumerate(chunks):
|
| 35 |
+
emb = await embedding_model.embed_text(chunk_text)
|
| 36 |
+
|
| 37 |
+
chunk = KnowledgeChunk(
|
| 38 |
+
kb_id=kb.id, chunk_index=idx, chunk_text=chunk_text, embedding=emb
|
| 39 |
+
)
|
| 40 |
+
session.add(chunk)
|
| 41 |
+
chunk_objs.append(chunk)
|
| 42 |
+
|
| 43 |
+
await session.commit()
|
| 44 |
+
|
| 45 |
+
return {"kb_id": kb.id, "name": kb_name, "chunks_stored": len(chunk_objs)}
|
| 46 |
+
|
| 47 |
+
async def store_manual_text(kb_id: UUID, text: str, session: AsyncSession):
|
| 48 |
+
embedding = await embedding_model.embed_text(text)
|
| 49 |
+
|
| 50 |
+
result = await session.execute(
|
| 51 |
+
select(KnowledgeChunk).where(KnowledgeChunk.kb_id == kb_id)
|
| 52 |
+
)
|
| 53 |
+
existing = result.scalars().all()
|
| 54 |
+
next_index = len(existing)
|
| 55 |
+
|
| 56 |
+
new_chunk = KnowledgeChunk(
|
| 57 |
+
kb_id=kb_id,
|
| 58 |
+
chunk_index=next_index,
|
| 59 |
+
chunk_text=text,
|
| 60 |
+
embedding=embedding
|
| 61 |
+
)
|
| 62 |
+
|
| 63 |
+
session.add(new_chunk)
|
| 64 |
+
await session.commit()
|
| 65 |
+
|
| 66 |
+
return {
|
| 67 |
+
"kb_id": kb_id,
|
| 68 |
+
"chunk_index": next_index,
|
| 69 |
+
"status": "stored",
|
| 70 |
+
"text": text
|
| 71 |
+
}
|
src/chatbot/utils.py
ADDED
@@ -0,0 +1,57 @@
+import re
+from typing import List
+import PyPDF2
+
+
+def clean_text(text: str) -> str:
+    text = re.sub(r'\s+', ' ', text)
+    text = re.sub(r'\s+([,.!?;:])', r'\1', text)
+    text = re.sub(r'[_\-]{2,}', ' ', text)
+    text = re.sub(r'\.{2,}', '.', text)
+    text = re.sub(r'\s{2,}', ' ', text)
+    return text.strip()
+
+
+def extract_text_from_pdf_fileobj(fileobj) -> str:
+    reader = PyPDF2.PdfReader(fileobj)
+    all_text = []
+    for page in reader.pages:
+        page_text = page.extract_text()
+        if page_text:
+            all_text.append(page_text)
+    return clean_text(" ".join(all_text))
+
+
+def split_into_sentences(text: str) -> List[str]:
+    sentence_endings = re.compile(r'(?<=[.!?])\s+')
+    sentences = sentence_endings.split(text)
+    return [s.strip() for s in sentences if s.strip()]
+
+
+def chunk_sentences_with_overlap(sentences: List[str], max_words: int = 200, overlap_words: int = 40) -> List[str]:
+    chunks = []
+    current = []
+    current_len = 0
+
+    for sentence in sentences:
+        words = sentence.split()
+        wc = len(words)
+
+        if current_len + wc > max_words and current:
+            chunks.append(" ".join(current))
+
+            if overlap_words > 0:
+                last_words = " ".join(" ".join(current).split()[-overlap_words:])
+                current = [last_words] if last_words else []
+                current_len = len(last_words.split())
+            else:
+                current = []
+                current_len = 0
+
+        current.append(sentence)
+        current_len += wc
+
+    if current:
+        chunks.append(" ".join(current))
+
+    return chunks
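A tiny demonstration of chunk_sentences_with_overlap with small limits so the overlap is visible; the import assumes the repo root is on sys.path:

from src.chatbot.utils import chunk_sentences_with_overlap

sentences = ["one two three.", "four five six.", "seven eight nine."]
chunks = chunk_sentences_with_overlap(sentences, max_words=6, overlap_words=3)
# The tail of each chunk is carried over to the head of the next one:
# ['one two three. four five six.', 'four five six. seven eight nine.']
print(chunks)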
src/core/__init__.py
ADDED
@@ -0,0 +1,9 @@
+from src.auth import models as auth_models
+from src.chatbot import models as chatbot_models
+from src.core import models as core_models
+from src.feed import models as feed_models
+from src.home import models as home_models
+from src.profile import models as profile_models
+from src.wellbeing import models as wellbeing_models
+from src.payslip import models as payslip_models
+from src.journaling import models as journaling_models
src/core/config.py
ADDED
@@ -0,0 +1,79 @@
+from typing import Optional
+from pydantic import PostgresDsn, computed_field
+from pydantic_settings import BaseSettings, SettingsConfigDict
+
+
+class SMTPConfig(BaseSettings):
+    server: str
+    port: int
+    username: str
+    password: str
+
+
+class Settings(BaseSettings):
+
+    JWT_ALGORITHM: str
+    JWT_EXPIRE: int
+    SECRET_KEY: str
+
+    POSTGRES_USER: str
+    POSTGRES_PASSWORD: str
+    POSTGRES_HOST: str
+    POSTGRES_DB: str
+
+    APP_NAME: str
+    ENV: str
+    DEBUG: bool
+    PORT: int
+
+    EMAIL_SERVER: str
+    EMAIL_PORT: int
+    EMAIL_USERNAME: str
+    EMAIL_PASSWORD: str
+
+    FERNET_KEY: str
+    VERIFICATION_BASE_URL: str
+
+    GOOGLE_CLIENT_ID: str
+    GOOGLE_CLIENT_SECRET: str
+    GOOGLE_REDIRECT_URI: str
+
+    FCM_SERVER_KEY: Optional[str] = None
+    SICK_LEAVE_LIMIT: int = 10
+    CASUAL_LEAVE_LIMIT: int = 10
+
+    AUTH_BASE: str = "https://accounts.google.com/o/oauth2/v2/auth"
+    TOKEN_URL: str = "https://oauth2.googleapis.com/token"
+    GMAIL_SEND_SCOPE: str = "https://www.googleapis.com/auth/gmail.send"
+
+    FIREBASE_TYPE: str
+    FIREBASE_PROJECT_ID: str
+    FIREBASE_PRIVATE_KEY_ID: str
+    FIREBASE_PRIVATE_KEY: str
+    FIREBASE_CLIENT_EMAIL: str
+    FIREBASE_CLIENT_ID: str
+    FIREBASE_AUTH_URI: str
+    FIREBASE_TOKEN_URI: str
+    FIREBASE_AUTH_PROVIDER_X509_CERT_URL: str
+    FIREBASE_CLIENT_X509_CERT_URL: str
+    FIREBASE_UNIVERSE_DOMAIN: str
+
+
+    @computed_field
+    @property
+    def DATABASE_URL(self) -> PostgresDsn:
+        """Sync DB URL"""
+        return f"postgresql://{self.POSTGRES_USER}:{self.POSTGRES_PASSWORD}@{self.POSTGRES_HOST}/{self.POSTGRES_DB}"
+
+    @computed_field
+    @property
+    def ASYNC_DATABASE_URL(self) -> PostgresDsn:
+        """Async DB URL"""
+        return f"postgresql+asyncpg://{self.POSTGRES_USER}:{self.POSTGRES_PASSWORD}@{self.POSTGRES_HOST}/{self.POSTGRES_DB}"
+
+    model_config = SettingsConfigDict(
+        env_file=".env", case_sensitive=False, env_file_encoding="utf-8"
+    )
+
+
+settings = Settings()
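A minimal sketch of consuming this settings object; it assumes a .env at the project root supplying every required field (SECRET_KEY, POSTGRES_*, EMAIL_*, FIREBASE_*, and so on), since fields without defaults fail validation when missing:

from src.core.config import settings  # reads .env via model_config

# The computed properties assemble the Postgres DSNs from the individual env vars:
print(settings.DATABASE_URL)        # postgresql://<user>:<password>@<host>/<db>
print(settings.ASYNC_DATABASE_URL)  # postgresql+asyncpg://<user>:<password>@<host>/<db>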