74-sort-card #87

Merged: 6 commits, Apr 23, 2025
4 changes: 2 additions & 2 deletions backend/Dockerfile
@@ -14,8 +14,8 @@ COPY . .
# Expose port 5000
EXPOSE 5000

# Environment variable setting (production)
ENV FLASK_ENV=production
# Environment variable setting: dev -> 1, prod -> 0
ENV FLASK_DEBUG=1

# Start the Flask app with Gunicorn
CMD ["gunicorn", "-b", "0.0.0.0:5000", "app:app"]
14 changes: 14 additions & 0 deletions backend/README.md
@@ -183,6 +183,20 @@
CREATE TRIGGER sync_auth_users
AFTER INSERT OR UPDATE OR DELETE ON auth.users
FOR EACH ROW EXECUTE PROCEDURE public.sync_auth_users_to_public_users();

-- Function to set updated_at to the current timestamp on update
CREATE OR REPLACE FUNCTION update_updated_at_column()
RETURNS TRIGGER AS $$
BEGIN
NEW.updated_at = NOW();
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
CREATE TRIGGER set_updated_at
BEFORE UPDATE ON dreams
FOR EACH ROW
EXECUTE FUNCTION update_updated_at_column();

```

Run the scripts under `seeds` to add sample data.
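A minimal sketch of what the `set_updated_at` trigger provides from the application side, using the same supabase-py calls as the integration test added in this PR; the `dream_id` value is a hypothetical placeholder, and the `get_supabase_client()` helper and `dreams` table are assumed from the rest of the PR:

```python
# Sketch only: update a row and confirm the BEFORE UPDATE trigger refreshed updated_at.
from models.db import get_supabase_client

supabase = get_supabase_client()
dream_id = "<existing dream id>"  # hypothetical placeholder

before = supabase.table("dreams").select("updated_at").eq("id", dream_id).execute()
supabase.table("dreams").update({"content": "trigger check"}).eq("id", dream_id).execute()
after = supabase.table("dreams").select("updated_at").eq("id", dream_id).execute()

# set_updated_at should have moved the timestamp forward on the UPDATE
print(before.data[0]["updated_at"], "->", after.data[0]["updated_at"])
```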
2 changes: 2 additions & 0 deletions backend/app.py
@@ -19,6 +19,8 @@
load_dotenv()

app = Flask(__name__)
# Environment configuration
app.debug = os.environ.get("FLASK_DEBUG") == "1"
# Logging configuration
setup_logger(app)
# CORS setup
2 changes: 2 additions & 0 deletions backend/config/logging_setup.py
@@ -17,6 +17,8 @@ def filter(self, record: logging.LogRecord) -> bool:

# Logger setup
def setup_logger(app: Flask):
    if app.config["ENV"] == "development":
        return  # do not configure anything when running locally
    # Remove existing handlers
    for h in app.logger.handlers:
        app.logger.removeHandler(h)
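Since the Dockerfile now sets FLASK_DEBUG rather than FLASK_ENV, the same early return could also be keyed off the debug flag that app.py derives from it. A minimal sketch of that variant, offered as an assumption rather than what this PR implements:

```python
from flask import Flask


# Sketch (assumption, not part of this PR): key the early return off app.debug,
# which app.py sets from FLASK_DEBUG, instead of app.config["ENV"].
def setup_logger(app: Flask) -> None:
    if app.debug:  # True when FLASK_DEBUG=1, per the Dockerfile convention
        return  # leave logging unconfigured for local development
    # ... production handler setup would follow here, as in the existing code
```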
3 changes: 3 additions & 0 deletions backend/pytest.ini
@@ -0,0 +1,3 @@
[pytest]
pythonpath = .
testpaths = test
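With `pythonpath = .` and `testpaths = test`, running `pytest` from the `backend/` directory should discover the new integration test and resolve imports such as `models.db` without extra configuration.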
3 changes: 2 additions & 1 deletion backend/requirements.txt
@@ -6,4 +6,5 @@ Flask-JWT-Extended~=4.7.1
supabase==2.13.0
gunicorn==23.0.0
pydantic==2.10.6
rich==13.9.4
rich==13.9.4
pytest==8.3.5
87 changes: 87 additions & 0 deletions backend/test/integration/sort_time_integration_test.py
@@ -0,0 +1,87 @@
import time
from datetime import UTC, datetime

import pytest
from models.db import get_supabase_client
from supabase import Client

supabase: Client = get_supabase_client()

# user_id of testuser1 (look the value up in Supabase and set it here)
TEST_USER_ID = "d7fc2a83-3046-48f1-94e7-bfd9a6b9ba3e"


@pytest.mark.skipif(supabase is None, reason="Supabase client not initialized")
def test_updated_at_and_sorting_behavior():
    # --- 0. Create a test record ---
    unique_content = f"pytest content {datetime.now(UTC).isoformat()}"
    insert_res = (
        supabase.table("dreams")
        .insert({"user_id": TEST_USER_ID, "content": unique_content, "is_public": True})
        .execute()
    )

    assert insert_res.data, "❌ Failed to create the record"
    dream_id = insert_res.data[0]["id"]

    try:
        # --- 1. Fetch updated_at before the update ---
        before_res = (
            supabase.table("dreams").select("updated_at").eq("id", dream_id).execute()
        )
        assert before_res.data, "❌ Record does not exist"
        before_time = datetime.fromisoformat(before_res.data[0]["updated_at"])

        # --- 2. Sleep ---
        time.sleep(1)

        # --- 3. Update content to a unique string ---
        new_content = f"pytest content {datetime.now(UTC).isoformat()}"
        update_res = (
            supabase.table("dreams")
            .update({"content": new_content})
            .eq("id", dream_id)
            .execute()
        )
        assert update_res.data, "❌ Failed to update the record"

        # --- 4. Verify that updated_at was refreshed ---
        after_res = (
            supabase.table("dreams").select("updated_at").eq("id", dream_id).execute()
        )
        after_time = datetime.fromisoformat(after_res.data[0]["updated_at"])

        assert after_time > before_time, (
            f"❌ updated_at was not refreshed: {before_time} vs {after_time}"
        )

        # --- 5. Verify descending sort by updated_at ---
        sorted_res = (
            supabase.table("dreams")
            .select("id, updated_at")
            .order("updated_at", desc=True)
            .execute()
        )
        sorted_ids = [record["id"] for record in sorted_res.data]

        assert dream_id == sorted_ids[0], f"❌ Sort order is incorrect: {sorted_ids}"
        print(f"✅ updated_at sort order OK, newest ID = {sorted_ids[0]}")

        # --- 6. Verify sorting by likes (descending) ---
        like_sorted_res = (
            supabase.table("dreams")
            .select("id, likes")
            .order("likes", desc=True)
            .execute()
        )
        like_counts = [record["likes"] for record in like_sorted_res.data]

        assert like_counts == sorted(like_counts, reverse=True), (
            f"❌ Descending sort by likes is incorrect: {like_counts}"
        )
        print(f"✅ likes sort order OK: {like_counts}")

    finally:
        # --- Cleanup (delete the test data) ---
        supabase.table("dreams").delete().eq("id", dream_id).execute()
        print("✅ Test data deleted")