Images uploaded and Pub/Sub + Cloud Function triggered

This commit is contained in:
johnpccd 2025-05-25 00:03:43 +02:00
parent 77bf5125fb
commit 504d9d8bc0
2 changed files with 169 additions and 56 deletions

View File

@@ -13,6 +13,8 @@ from datetime import datetime, timedelta
import secrets
import hashlib
import json
from io import BytesIO
from PIL import Image
from dotenv import load_dotenv
@@ -32,6 +34,8 @@ from src.db.repositories.firestore_user_repository import firestore_user_reposit
from src.db.repositories.firestore_api_key_repository import firestore_api_key_repository
from src.db.repositories.firestore_image_repository import firestore_image_repository
from src.auth.security import hash_api_key as app_hash_api_key
from src.services.storage import StorageService
from src.services.pubsub_service import pubsub_service
# Configure logging
logging.basicConfig(
@@ -47,6 +51,54 @@ class CustomJSONEncoder(json.JSONEncoder):
return str(obj)
return super().default(obj)
# Initialize services
storage_service = StorageService()
def create_sample_image(width=800, height=600, color=(100, 150, 200), filename="sample.jpg"):
    """Create an in-memory sample JPEG for seeding/testing.

    Args:
        width: Image width in pixels.
        height: Image height in pixels.
        color: RGB background fill tuple.
        filename: Unused by this function; kept for backward compatibility
            with existing callers that pass it.

    Returns:
        io.BytesIO positioned at offset 0, containing JPEG bytes (quality 85).
    """
    img = Image.new('RGB', (width, height), color)

    # Overlay simple rectangles so the image is visually non-trivial.
    from PIL import ImageDraw, ImageFont
    draw = ImageDraw.Draw(img)
    draw.rectangle([50, 50, width - 50, height - 50], outline=(255, 255, 255), width=3)
    draw.rectangle([100, 100, width - 100, height - 100], outline=(255, 255, 0), width=2)

    # Best-effort text label: font loading/drawing may fail on minimal
    # systems, and the label is cosmetic, so skip it rather than abort.
    # (Bare `except:` replaced with `except Exception:` so SystemExit /
    # KeyboardInterrupt are not swallowed.)
    try:
        font = ImageFont.load_default()
        text = f"Sample Image\n{width}x{height}"
        draw.text((width // 2 - 50, height // 2 - 20), text, fill=(255, 255, 255), font=font)
    except Exception:
        pass

    # Serialize to an in-memory buffer and rewind for the caller.
    img_bytes = BytesIO()
    img.save(img_bytes, format='JPEG', quality=85)
    img_bytes.seek(0)
    return img_bytes
class MockUploadFile:
    """Lightweight stand-in for FastAPI's UploadFile.

    Wraps a BytesIO and exposes the async ``read``/``seek`` subset of the
    UploadFile interface, plus the ``filename``/``content_type`` metadata.
    """

    def __init__(self, content: BytesIO, filename: str, content_type: str):
        # Metadata mirrors what FastAPI attaches to a real upload.
        self.filename = filename
        self.content_type = content_type
        # Underlying byte stream and a locally tracked seek position.
        self.file = content
        self._position = 0

    async def read(self, size: int = -1) -> bytes:
        """Return up to *size* bytes (the whole remainder when size is -1)."""
        data = self.file.read(size)
        return data

    async def seek(self, position: int) -> None:
        """Reposition the stream and record the new offset."""
        self.file.seek(position)
        self._position = position
def generate_api_key(team_id=None, user_id=None):
"""Generate a random API key using the same format as the application"""
# Generate a random key prefix (visible part)
@@ -237,69 +289,122 @@ async def seed_api_keys(user_ids, team_ids):
return generated_keys
async def seed_images(team_ids, user_ids):
# NOTE(review): this span is a rendered diff with the +/- markers stripped.
# Pre-change lines (the old metadata-only docstring, log line, and
# `images_data` dict fields) are interleaved with the post-change
# real-upload implementation; the two docstrings below belong to the old
# and new versions of the same function respectively.
"""Seed the database with image metadata"""
logger.info("Seeding images...")
"""Seed the database with real image uploads using the application's business logic"""
logger.info("Seeding images with real uploads...")
# Old list name (removed) followed by the new list name (added):
images_data = [
# Define sample images to create and upload
sample_images = [
{
# Old entry fields (original_filename/file_size/storage_path/public_url/
# team_id/uploader_id/metadata) interleave with the new config fields
# (filename/description/tags/team_idx/user_idx/width/height/color).
"filename": "image1.jpg",
"original_filename": "product_photo.jpg",
"file_size": 1024 * 1024, # 1MB
"content_type": "image/jpeg",
"storage_path": "teams/{}/images/image1.jpg".format(team_ids[0]),
"public_url": "https://storage.googleapis.com/example-bucket/teams/{}/images/image1.jpg".format(team_ids[0]),
"team_id": team_ids[0],
"uploader_id": user_ids[0],
"filename": "product_photo.jpg",
"description": "Product photo for marketing",
"tags": ["product", "marketing", "high-resolution"],
"metadata": {
"team_idx": 0,
"user_idx": 0,
"width": 1920,
"height": 1080,
"color_space": "sRGB"
}
"color": (70, 130, 180) # Steel blue
},
{
# Second sample image: same old/new interleaving as above.
"filename": "image2.png",
"original_filename": "logo.png",
"file_size": 512 * 1024, # 512KB
"content_type": "image/png",
"storage_path": "teams/{}/images/image2.png".format(team_ids[1]),
"public_url": "https://storage.googleapis.com/example-bucket/teams/{}/images/image2.png".format(team_ids[1]),
"team_id": team_ids[1],
"uploader_id": user_ids[2],
"filename": "company_logo.png",
"description": "Company logo",
"tags": ["logo", "branding"],
"metadata": {
"team_idx": 1,
"user_idx": 2,
"width": 800,
"height": 600,
"color_space": "sRGB"
}
"color": (255, 165, 0) # Orange
},
{
# Third sample image: same old/new interleaving as above.
"filename": "image3.jpg",
"original_filename": "support_screenshot.jpg",
"file_size": 256 * 1024, # 256KB
"content_type": "image/jpeg",
"storage_path": "teams/{}/images/image3.jpg".format(team_ids[2]),
"public_url": "https://storage.googleapis.com/example-bucket/teams/{}/images/image3.jpg".format(team_ids[2]),
"team_id": team_ids[2],
"uploader_id": user_ids[3],
"filename": "support_screenshot.jpg",
"description": "Screenshot for support ticket",
"tags": ["support", "screenshot", "bug"],
"metadata": {
"team_idx": 2,
"user_idx": 3,
"width": 1280,
"height": 720,
"color_space": "sRGB"
}
"color": (144, 238, 144) # Light green
}
]
image_ids = []
# Old loop header (removed) followed by the new loop header (added):
for image_data in images_data:
image = ImageModel(**image_data)
for img_config in sample_images:
try:
logger.info(f"Creating and uploading image: {img_config['filename']}")
# Create sample image
# Renders an in-memory JPEG via create_sample_image (defined above).
img_content = create_sample_image(
width=img_config['width'],
height=img_config['height'],
color=img_config['color'],
filename=img_config['filename']
)
# Create mock upload file
# Content type inferred from the extension: .jpg -> JPEG, else PNG.
content_type = "image/jpeg" if img_config['filename'].endswith('.jpg') else "image/png"
mock_file = MockUploadFile(
content=img_content,
filename=img_config['filename'],
content_type=content_type
)
# Get team and user IDs
# team_idx/user_idx index into the ids passed in by the caller.
team_id = team_ids[img_config['team_idx']]
user_id = user_ids[img_config['user_idx']]
# Upload to storage using the actual StorageService
logger.info(f"Uploading {img_config['filename']} to Google Cloud Storage...")
# upload_file returns (storage_path, content_type, file_size, metadata);
# content_type is intentionally overwritten with the service's value.
storage_path, content_type, file_size, metadata = await storage_service.upload_file(
mock_file, str(team_id)
)
# Generate public URL
public_url = storage_service.generate_public_url(storage_path)
# Create image record using the actual business logic
image = ImageModel(
filename=img_config['filename'],
original_filename=img_config['filename'],
file_size=file_size,
content_type=content_type,
storage_path=storage_path,
public_url=public_url,
team_id=team_id,
uploader_id=user_id,
description=img_config['description'],
tags=img_config['tags'],
metadata=metadata
)
# Save to database
created_image = await firestore_image_repository.create(image)
image_ids.append(created_image.id)
# Old log line (removed) followed by the new log lines (added):
logger.info(f"Created image: {created_image.filename} (ID: {created_image.id})")
logger.info(f"Created image record: {created_image.filename} (ID: {created_image.id})")
logger.info(f"Storage path: {storage_path}")
logger.info(f"Public URL: {public_url}")
# Publish image processing task to Pub/Sub (this triggers Cloud Run)
# Best-effort publish: a Pub/Sub failure is logged as a warning and
# does NOT abort seeding (inner try/except), unlike upload/DB errors
# which re-raise via the outer handler below.
try:
logger.info(f"Publishing image processing task to Pub/Sub for image {created_image.id}...")
task_published = await pubsub_service.publish_image_processing_task(
image_id=str(created_image.id),
storage_path=storage_path,
team_id=str(team_id)
)
if task_published:
logger.info(f"✅ Successfully published processing task for image {created_image.id}")
else:
logger.warning(f"❌ Failed to publish processing task for image {created_image.id}")
except Exception as e:
logger.warning(f"❌ Failed to publish image processing task: {e}")
except Exception as e:
# Any upload or DB failure aborts the whole seeding run.
logger.error(f"Error creating image {img_config['filename']}: {e}")
raise
logger.info(f"Successfully seeded {len(image_ids)} images with real uploads!")
logger.info("Images uploaded to Google Cloud Storage and processing tasks sent to Cloud Run")
return image_ids

View File

@@ -101,8 +101,9 @@ class StorageService:
try:
# Extract image metadata if it's an image
if content_type and content_type.startswith('image/'):
# Create a temporary file to read with PIL
with Image.open(BinaryIO(content)) as img:
# Create a BytesIO object to read with PIL
from io import BytesIO
with Image.open(BytesIO(content)) as img:
metadata = {
'width': img.width,
'height': img.height,
@@ -122,10 +123,9 @@ class StorageService:
# Upload the file
blob.upload_from_string(content, content_type=content_type)
# Make the blob publicly readable
blob.make_public()
logger.info(f"File uploaded and made public: {storage_path}")
# Note: Skip making blob public due to uniform bucket-level access
# The bucket should be configured with public access at the bucket level
logger.info(f"File uploaded: {storage_path}")
# Seek back to the beginning for future reads
await file.seek(0)
@@ -242,9 +242,17 @@ class StorageService:
logger.warning(f"File not found for making public: {storage_path}")
return False
# Try to make public, but handle uniform bucket-level access gracefully
try:
blob.make_public()
logger.info(f"File made public: {storage_path}")
return True
except Exception as acl_error:
if "uniform bucket-level access" in str(acl_error):
logger.info(f"File is already public due to uniform bucket-level access: {storage_path}")
return True
else:
raise acl_error
except Exception as e:
logger.error(f"Error making file public: {e}")
raise